➕ Add vendor
This commit is contained in:
parent
a399713787
commit
c023c9ae8c
|
@ -0,0 +1,23 @@
|
|||
FROM golang:latest

# MAINTAINER is deprecated; LABEL is the supported replacement.
LABEL maintainer="Liang Ding <d@b3log.org>"

ENV GOROOT=/usr/local/go

# -y is required: docker build runs non-interactively, so apt-get must not
# prompt for confirmation. The copy of /usr/local/go serves as the bootstrap
# toolchain referenced by GOROOT_BOOTSTRAP below.
RUN apt-get update && apt-get install -y bzip2 zip unzip && cp -r /usr/local/go /usr/local/gobt
ENV GOROOT_BOOTSTRAP=/usr/local/gobt

ADD . /wide/gogogo/src/github.com/b3log/wide

# Separate unprivileged accounts: "wide" runs the IDE, "runner" executes user code.
RUN useradd wide && useradd runner

ENV GOPATH=/wide/gogogo

# Pre-build third-party dependencies and install the Go tools Wide shells out to.
RUN go build github.com/go-fsnotify/fsnotify \
    && go build github.com/gorilla/sessions \
    && go build github.com/gorilla/websocket \
    && go install github.com/visualfc/gotools github.com/nsf/gocode github.com/bradfitz/goimports

WORKDIR /wide/gogogo/src/github.com/b3log/wide
RUN go build -v

EXPOSE 7070
|
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
https://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -0,0 +1,180 @@
|
|||
# [Wide](https://github.com/b3log/wide) [![Build Status](https://img.shields.io/travis/b3log/wide.svg?style=flat)](https://travis-ci.org/b3log/wide) [![Go Report Card](https://goreportcard.com/badge/github.com/b3log/wide)](https://goreportcard.com/report/github.com/b3log/wide) [![Coverage Status](https://img.shields.io/coveralls/b3log/wide.svg?style=flat)](https://coveralls.io/r/b3log/wide) [![Apache License](https://img.shields.io/badge/license-apache2-orange.svg?style=flat)](https://www.apache.org/licenses/LICENSE-2.0) [![API Documentation](https://img.shields.io/badge/godoc-reference-blue.svg?style=flat)](https://godoc.org/github.com/b3log/wide) [![Download](https://img.shields.io/badge/download-~4.3K-red.svg?style=flat)](https://pan.baidu.com/s/1dD3XwOT)
|
||||
|
||||
_Have a [try](https://wide.b3log.org/signup) first, then [download](https://pan.baidu.com/s/1dD3XwOT) and set it up on your local area network — enjoy yourself!_
|
||||
|
||||
先试试我们搭建好的[在线服务](https://wide.b3log.org/signup),你可以在这里[下载](https://pan.baidu.com/s/1dD3XwOT)并在本地环境运行,然后邀请小伙伴们来玩吧!
|
||||
|
||||
> * 关于 Wide 的产品定位,请看[这里](https://hacpai.com/article/1438407961481),并欢迎参与讨论~
|
||||
> * 加入[**黑客派**](https://hacpai.com/register),与其他程序员、设计师共同成长!
|
||||
|
||||
## Introduction
|
||||
|
||||
A <b>W</b>eb-based <b>IDE</b> for Teams using Go programming language/Golang.
|
||||
|
||||
![Hello, 世界](https://cloud.githubusercontent.com/assets/873584/4606377/d0ca3c2a-521b-11e4-912c-d955ab05850b.png)
|
||||
|
||||
## Authors
|
||||
|
||||
[Daniel](https://github.com/88250) and [Vanessa](https://github.com/Vanessa219) are the main authors of Wide, [here](https://github.com/b3log/wide/graphs/contributors) are all contributors.
|
||||
|
||||
Wide 的主要作者是 [Daniel](https://github.com/88250) 与 [Vanessa](https://github.com/Vanessa219),所有贡献者可以在[这里](https://github.com/b3log/wide/graphs/contributors)看到。
|
||||
|
||||
## Motivation
|
||||
|
||||
* **Team** IDE:
|
||||
* _Safe and reliable_: the project source code stored on the server in real time, the developer's machine crashes without losing any source code
|
||||
* _Unified environment_: server unified development environment configuration, the developer machine without any additional configuration
|
||||
* _Out of the box_: 5 minutes to set up a server, then open a browser to develop and debug
|
||||
* _Version Control_: each developer has its own source code repository, easy sync with the trunk
|
||||
* **Web-based** IDE:
|
||||
* Developer needs a browser only
|
||||
* Cross-platform, even on mobile devices
|
||||
* Easy to extend
|
||||
* Easy to integrate with other systems
|
||||
* For the geeks
|
||||
* A try for commercial-open source: versions customized for enterprises, close to their development work flows respectively
|
||||
* Currently more popular Go IDE has some defects or regrets:
|
||||
* Text editor (vim/emacs/sublime/Atom, etc.): For the Go newbie is too complex
|
||||
* Plug-in (goclipse, etc.): the need for the original IDE support, not professional
|
||||
* LiteIDE: no modern user interface :p
|
||||
* No team development experience
|
||||
* There are only a few Go IDEs, and none of them is developed in Go itself — this is a nice try
|
||||
|
||||
## Features
|
||||
|
||||
* [X] Code Highlight, Folding: Go/HTML/JavaScript/Markdown etc.
|
||||
* [X] Autocomplete: Go/HTML etc.
|
||||
* [X] Format: Go/HTML/JSON etc.
|
||||
* [X] Build & Run
|
||||
* [X] Multiplayer: a real team development experience
|
||||
* [X] Navigation, Jump to declaration, Find usages, File search etc.
|
||||
* [X] Shell: run command on the server
|
||||
* [X] Web development: HTML/JS/CSS editor with [Emmet](https://emmet.io) integrated
|
||||
* [X] Go tool: go get/install/fmt etc.
|
||||
* [X] File Import & Export
|
||||
* [X] Themes: editor and UI adjust, respectively
|
||||
* [X] Cross-Compilation
|
||||
* [ ] Debug
|
||||
* [ ] Git integration: git command on the web
|
||||
|
||||
## Screenshots
|
||||
|
||||
* **Overview**
|
||||
|
||||
![Overview](https://cloud.githubusercontent.com/assets/873584/5450620/1d51831e-8543-11e4-930b-670871902425.png)
|
||||
* **Goto File**
|
||||
|
||||
![Goto File](https://cloud.githubusercontent.com/assets/873584/5450616/1d495da6-8543-11e4-9285-f9d9c60779ac.png)
|
||||
* **Autocomplete**
|
||||
|
||||
![Autocomplete](https://cloud.githubusercontent.com/assets/873584/5450619/1d4d5712-8543-11e4-8fe4-35dbc8348a6e.png)
|
||||
* **Theme**
|
||||
|
||||
![4](https://cloud.githubusercontent.com/assets/873584/5450617/1d4c0826-8543-11e4-8b86-f79a4e41550a.png)
|
||||
* **Show Expression Info**
|
||||
|
||||
![Show Expression Info](https://cloud.githubusercontent.com/assets/873584/5450618/1d4cd9f4-8543-11e4-950f-121bd3ff4a39.png)
|
||||
* **Build Error Info**
|
||||
|
||||
![Build Error Info](https://cloud.githubusercontent.com/assets/873584/5450632/3e51cccc-8543-11e4-8ca8-8d2427aa16b8.png)
|
||||
* **Git Clone**
|
||||
|
||||
![Git Clone](https://cloud.githubusercontent.com/assets/873584/6545235/2284f230-c5b7-11e4-985e-7e04367921b1.png)
|
||||
* **Cross-Compilation**
|
||||
|
||||
![Cross-Compilation](https://cloud.githubusercontent.com/assets/873584/10130037/226d75fc-65f7-11e5-94e4-25ee579ca175.png)
|
||||
|
||||
* **Playground**
|
||||
![Playground](https://cloud.githubusercontent.com/assets/873584/21209772/449ecfd2-c2b1-11e6-9aa6-a83477d9f269.gif)
|
||||
|
||||
## Architecture
|
||||
|
||||
### Build & Run
|
||||
|
||||
![Build & Run](https://cloud.githubusercontent.com/assets/873584/4389219/3642bc62-43f3-11e4-8d1f-06d7aaf22784.png)
|
||||
|
||||
* A browser tab corresponds to a Wide session
|
||||
* Execution output push via WebSocket
|
||||
|
||||
Flow:
|
||||
1. Browser sends ````Build```` request
|
||||
2. Server executes ````go build```` command via ````os/exec````<br/>
|
||||
2.1. Generates an executable file
|
||||
3. Browser sends ````Run```` request
|
||||
4. Server executes the file via ````os/exec````<br/>
|
||||
4.1. A running process<br/>
|
||||
4.2. Execution output push via WebSocket channel
|
||||
5. Browser renders with callback function ````ws.onmessage````
|
||||
|
||||
### Code Assist
|
||||
|
||||
![Code Assist](https://cloud.githubusercontent.com/assets/873584/4399135/3b80c21c-4463-11e4-8e94-7f7e8d12a4df.png)
|
||||
|
||||
* Autocompletion
|
||||
* Find Usages/Jump To Declaration/etc.
|
||||
|
||||
Flow:
|
||||
1. Browser sends code assist request
|
||||
2. Handler gets user workspace of the request with HTTP session
|
||||
3. Server executes ````gocode````/````ide_stub(gotools)````<br/>
|
||||
3.1 Sets environment variables (e.g. ${GOPATH})<br/>
|
||||
3.2 ````gocode```` with ````lib-path```` parameter
|
||||
|
||||
## Documents
|
||||
|
||||
* [用户指南](https://www.gitbook.com/book/88250/wide-user-guide)
|
||||
* [开发指南](https://www.gitbook.com/book/88250/wide-dev-guide)
|
||||
|
||||
## Setup
|
||||
|
||||
### Download Binary
|
||||
|
||||
We have provided OS-specific executable binary as follows:
|
||||
|
||||
* linux-amd64/386
|
||||
* windows-amd64/386
|
||||
* darwin-amd64/386
|
||||
|
||||
Download [HERE](https://pan.baidu.com/s/1dD3XwOT)!
|
||||
|
||||
### Build Wide for yourself
|
||||
|
||||
1. [Download](https://github.com/b3log/wide/archive/master.zip) source or by `git clone https://github.com/b3log/wide`
|
||||
2. Get dependencies with
|
||||
* `go get`
|
||||
* `go get github.com/visualfc/gotools github.com/nsf/gocode github.com/bradfitz/goimports`
|
||||
3. Compile wide with `go build`
|
||||
|
||||
### Docker
|
||||
|
||||
1. Get image: `sudo docker pull 88250/wide:latest`
|
||||
2. Run: `sudo docker run -p 127.0.0.1:7070:7070 88250/wide:latest ./wide -docker=true -channel=ws://127.0.0.1:7070`
|
||||
3. Open browser: http://127.0.0.1:7070
|
||||
|
||||
## Known Issues
|
||||
|
||||
* [Shell is not available on Windows](https://github.com/b3log/wide/issues/32)
|
||||
* [Rename directory](https://github.com/b3log/wide/issues/251)
|
||||
|
||||
## Terms
|
||||
|
||||
* This software is open sourced under the Apache License 2.0
|
||||
* You can not get rid of the "Powered by [B3log](https://b3log.org)" from any page, even which you made
|
||||
* If you want to use this software for commercial purpose, please mail to support@liuyun.io for a commercial license request
|
||||
* Copyright © b3log.org, all rights reserved
|
||||
|
||||
## Credits
|
||||
|
||||
Wide is made possible by the following open source projects.
|
||||
|
||||
* [golang](https://golang.org)
|
||||
* [CodeMirror](https://github.com/marijnh/CodeMirror)
|
||||
* [zTree](https://github.com/zTree/zTree_v3)
|
||||
* [LiteIDE](https://github.com/visualfc/liteide)
|
||||
* [gocode](https://github.com/nsf/gocode)
|
||||
* [Gorilla](https://github.com/gorilla)
|
||||
* [Docker](https://docker.com)
|
||||
|
||||
----
|
||||
|
||||
<img src="https://cloud.githubusercontent.com/assets/873584/4606328/4e848b96-5219-11e4-8db1-fa12774b57b4.png" width="256px" />
|
|
@ -0,0 +1,4 @@
|
|||
* This software is open sourced under the Apache License 2.0
|
||||
* You cannot remove the "Powered by [B3log](https://b3log.org)" from any page, even pages developed by you
|
||||
* If you want to use this software for commercial purposes, please mail support@liuyun.io to request a commercial license
|
||||
* Copyright (c) b3log.org, all rights reserved
|
|
@ -0,0 +1,24 @@
|
|||
#!/bin/bash
# Run test coverage on each subdirectory and merge the coverage profiles
# into a single profile.cov suitable for `go tool cover` / coveralls.
# See https://gist.github.com/hailiang/0f22736320abe6be71ce for more details.

set -e

echo "mode: count" > profile.cov

# Standard go tooling behavior is to ignore dirs with leading underscores.
for dir in $(find . -maxdepth 10 -not -path './.git*' -not -path '*/_*' -type d);
do
    # Only test directories that actually contain Go files.
    # $dir is quoted throughout so paths containing spaces do not word-split.
    if ls "$dir"/*.go &> /dev/null; then
        go test -covermode=count -coverprofile="$dir/profile.tmp" "$dir"
        if [ -f "$dir/profile.tmp" ]
        then
            # Strip the per-package "mode:" header before merging.
            tail -n +2 "$dir/profile.tmp" >> profile.cov
            rm "$dir/profile.tmp"
        fi
    fi
done

go tool cover -func profile.cov
|
||||
|
|
@ -0,0 +1,135 @@
|
|||
/*
 * Copyright (c) 2014-2015, b3log.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @file frontend tool.
 *
 * @author <a href="mailto:liliyuan@fangstar.net">Liyuan Li</a>
 * @version 0.1.0.0, Dec 15, 2015
 */
var gulp = require("gulp");
var concat = require('gulp-concat');
var minifyCSS = require('gulp-minify-css');
var uglify = require('gulp-uglify');
var sourcemaps = require("gulp-sourcemaps");

// The "cc" (concat & compress) task bundles and minifies the third-party and
// Wide-specific CSS/JS assets into lib.min.* and wide.min.* files.
gulp.task('cc', function () {
    // css: third-party stylesheets bundled into static/css/lib.min.css
    var cssLibs = ['./static/js/lib/jquery-layout/layout-default-latest.css',
        './static/js/lib/codemirror-5.1/codemirror.css',
        './static/js/lib/codemirror-5.1/addon/hint/show-hint.css',
        './static/js/lib/codemirror-5.1/addon/lint/lint.css',
        './static/js/lib/codemirror-5.1/addon/fold/foldgutter.css',
        './static/js/lib/codemirror-5.1/addon/dialog/dialog.css',
        './static/js/overwrite/codemirror/theme/*.css'];
    gulp.src(cssLibs)
            .pipe(minifyCSS())
            .pipe(concat('lib.min.css'))
            .pipe(gulp.dest('./static/css/'));

    // zTree ships its stylesheet separately; minify it in place.
    gulp.src('./static/js/lib/ztree/zTreeStyle.css')
            .pipe(minifyCSS())
            .pipe(concat('zTreeStyle.min.css'))
            .pipe(gulp.dest('./static/js/lib/ztree/'));

    // Wide's own stylesheets bundled into static/css/wide.min.css
    var cssWide = ['./static/css/dialog.css',
        './static/css/base.css',
        './static/css/wide.css',
        './static/css/side.css',
        './static/css/start.css',
        './static/css/about.css'
    ];

    gulp.src(cssWide)
            .pipe(minifyCSS())
            .pipe(concat('wide.min.css'))
            .pipe(gulp.dest('./static/css/'));

    // js: third-party scripts bundled into static/js/lib.min.js.
    // NOTE: duplicate list entries (active-line.js was listed twice) have been
    // removed — concat would otherwise emit the file twice into the bundle.
    var jsLibs = ['./static/js/lib/jquery-2.1.1.min.js',
        './static/js/lib/jquery-ui.min.js',
        './static/js/lib/jquery-layout/jquery.layout-latest.js',
        './static/js/lib/reconnecting-websocket.js',
        './static/js/lib/Autolinker.min.js',
        './static/js/lib/emmet.js',
        './static/js/lib/js-beautify-1.5.4/beautify.js',
        './static/js/lib/js-beautify-1.5.4/beautify-html.js',
        './static/js/lib/js-beautify-1.5.4/beautify-css.js',
        './static/js/lib/jquery-file-upload-9.8.0/vendor/jquery.ui.widget.js',
        './static/js/lib/jquery-file-upload-9.8.0/jquery.iframe-transport.js',
        './static/js/lib/jquery-file-upload-9.8.0/jquery.fileupload.js',
        './static/js/lib/codemirror-5.1/codemirror.min.js',
        './static/js/lib/codemirror-5.1/addon/lint/lint.js',
        './static/js/lib/codemirror-5.1/addon/lint/json-lint.js',
        './static/js/lib/codemirror-5.1/addon/selection/active-line.js',
        './static/js/overwrite/codemirror/addon/hint/show-hint.js',
        './static/js/lib/codemirror-5.1/addon/hint/anyword-hint.js',
        './static/js/lib/codemirror-5.1/addon/display/rulers.js',
        './static/js/lib/codemirror-5.1/addon/edit/closebrackets.js',
        './static/js/lib/codemirror-5.1/addon/edit/matchbrackets.js',
        './static/js/lib/codemirror-5.1/addon/edit/closetag.js',
        './static/js/lib/codemirror-5.1/addon/search/searchcursor.js',
        './static/js/lib/codemirror-5.1/addon/search/search.js',
        './static/js/lib/codemirror-5.1/addon/dialog/dialog.js',
        './static/js/lib/codemirror-5.1/addon/search/match-highlighter.js',
        './static/js/lib/codemirror-5.1/addon/fold/foldcode.js',
        './static/js/lib/codemirror-5.1/addon/fold/foldgutter.js',
        './static/js/lib/codemirror-5.1/addon/fold/brace-fold.js',
        './static/js/lib/codemirror-5.1/addon/fold/xml-fold.js',
        './static/js/lib/codemirror-5.1/addon/fold/markdown-fold.js',
        './static/js/lib/codemirror-5.1/addon/fold/comment-fold.js',
        './static/js/lib/codemirror-5.1/addon/fold/mode/loadmode.js',
        './static/js/lib/codemirror-5.1/addon/fold/comment/comment.js',
        './static/js/lib/codemirror-5.1/mode/meta.js',
        './static/js/lib/codemirror-5.1/mode/go/go.js',
        './static/js/lib/codemirror-5.1/mode/clike/clike.js',
        './static/js/lib/codemirror-5.1/mode/xml/xml.js',
        './static/js/lib/codemirror-5.1/mode/htmlmixed/htmlmixed.js',
        './static/js/lib/codemirror-5.1/mode/javascript/javascript.js',
        './static/js/lib/codemirror-5.1/mode/markdown/markdown.js',
        './static/js/lib/codemirror-5.1/mode/css/css.js',
        './static/js/lib/codemirror-5.1/mode/shell/shell.js',
        './static/js/lib/codemirror-5.1/mode/sql/sql.js',
        './static/js/lib/codemirror-5.1/keymap/vim.js',
        './static/js/lib/lint/json-lint.js',
        './static/js/lib/lint/go-lint.js'];
    gulp.src(jsLibs)
            .pipe(uglify())
            .pipe(concat('lib.min.js'))
            .pipe(gulp.dest('./static/js/'));

    // Wide's own scripts bundled into static/js/wide.min.js with source maps.
    // NOTE: tabs.js was listed twice; the duplicate has been removed.
    var jsWide = ['./static/js/tabs.js',
        './static/js/dialog.js',
        './static/js/editors.js',
        './static/js/notification.js',
        './static/js/tree.js',
        './static/js/wide.js',
        './static/js/session.js',
        './static/js/menu.js',
        './static/js/windows.js',
        './static/js/hotkeys.js',
        './static/js/bottomGroup.js'
    ];
    gulp.src(jsWide)
            .pipe(sourcemaps.init())
            .pipe(uglify())
            .pipe(concat('wide.min.js'))
            .pipe(sourcemaps.write("."))
            .pipe(gulp.dest('./static/js/'));
});
|
Binary file not shown.
|
@ -0,0 +1,483 @@
|
|||
// Copyright (c) 2014-2018, b3log.org & hacpai.com
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"compress/gzip"
|
||||
"flag"
|
||||
"html/template"
|
||||
"io"
|
||||
"mime"
|
||||
"net/http"
|
||||
_ "net/http/pprof"
|
||||
"os"
|
||||
"os/signal"
|
||||
"runtime"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/b3log/wide/conf"
|
||||
"github.com/b3log/wide/editor"
|
||||
"github.com/b3log/wide/event"
|
||||
"github.com/b3log/wide/file"
|
||||
"github.com/b3log/wide/i18n"
|
||||
"github.com/b3log/wide/log"
|
||||
"github.com/b3log/wide/notification"
|
||||
"github.com/b3log/wide/output"
|
||||
"github.com/b3log/wide/playground"
|
||||
"github.com/b3log/wide/scm/git"
|
||||
"github.com/b3log/wide/session"
|
||||
"github.com/b3log/wide/util"
|
||||
)
|
||||
|
||||
// Logger
|
||||
var logger *log.Logger
|
||||
|
||||
// The only one init function in Wide.
|
||||
func init() {
|
||||
confPath := flag.String("conf", "conf/wide.json", "path of wide.json")
|
||||
confIP := flag.String("ip", "", "this will overwrite Wide.IP if specified")
|
||||
confPort := flag.String("port", "", "this will overwrite Wide.Port if specified")
|
||||
confServer := flag.String("server", "", "this will overwrite Wide.Server if specified")
|
||||
confLogLevel := flag.String("log_level", "", "this will overwrite Wide.LogLevel if specified")
|
||||
confStaticServer := flag.String("static_server", "", "this will overwrite Wide.StaticServer if specified")
|
||||
confContext := flag.String("context", "", "this will overwrite Wide.Context if specified")
|
||||
confChannel := flag.String("channel", "", "this will overwrite Wide.Channel if specified")
|
||||
confStat := flag.Bool("stat", false, "whether report statistics periodically")
|
||||
confDocker := flag.Bool("docker", false, "whether run in a docker container")
|
||||
confPlayground := flag.String("playground", "", "this will overwrite Wide.Playground if specified")
|
||||
confUsersWorkspaces := flag.String("users_workspaces", "", "this will overwrite Wide.UsersWorkspaces if specified")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
log.SetLevel("warn")
|
||||
logger = log.NewLogger(os.Stdout)
|
||||
|
||||
wd := util.OS.Pwd()
|
||||
if strings.HasPrefix(wd, os.TempDir()) {
|
||||
logger.Error("Don't run Wide in OS' temp directory or with `go run`")
|
||||
|
||||
os.Exit(-1)
|
||||
}
|
||||
|
||||
i18n.Load()
|
||||
event.Load()
|
||||
conf.Load(*confPath, *confIP, *confPort, *confServer, *confLogLevel, *confStaticServer, *confContext, *confChannel,
|
||||
*confPlayground, *confDocker, *confUsersWorkspaces)
|
||||
|
||||
conf.FixedTimeCheckEnv()
|
||||
session.FixedTimeSave()
|
||||
session.FixedTimeRelease()
|
||||
|
||||
if *confStat {
|
||||
session.FixedTimeReport()
|
||||
}
|
||||
|
||||
logger.Debug("host ["+runtime.Version()+", "+runtime.GOOS+"_"+runtime.GOARCH+"], cross-compilation ",
|
||||
util.Go.GetCrossPlatforms())
|
||||
}
|
||||
|
||||
// Main registers every HTTP route of Wide on the default mux and then
// starts the HTTP server on the address configured in conf.Wide.Server.
//
// Must run after the package init has loaded conf (see conf.Load), since
// almost every route is prefixed with conf.Wide.Context.
func main() {
	runtime.GOMAXPROCS(conf.Wide.MaxProcs)

	initMime()     // register mime types missing on some OSes
	handleSignal() // graceful shutdown on SIGINT/SIGQUIT/SIGTERM

	// IDE
	http.HandleFunc(conf.Wide.Context+"/", handlerGzWrapper(indexHandler))
	http.HandleFunc(conf.Wide.Context+"/start", handlerWrapper(startHandler))
	http.HandleFunc(conf.Wide.Context+"/about", handlerWrapper(aboutHandler))
	http.HandleFunc(conf.Wide.Context+"/keyboard_shortcuts", handlerWrapper(keyboardShortcutsHandler))

	// static resources
	http.Handle(conf.Wide.Context+"/static/", http.StripPrefix(conf.Wide.Context+"/static/", http.FileServer(http.Dir("static"))))
	serveSingle("/favicon.ico", "./static/favicon.ico")

	// workspaces: one file server per configured user, rooted at that
	// user's workspace path
	for _, user := range conf.Users {
		http.Handle(conf.Wide.Context+"/workspace/"+user.Name+"/",
			http.StripPrefix(conf.Wide.Context+"/workspace/"+user.Name+"/", http.FileServer(http.Dir(user.WorkspacePath()))))
	}

	// session
	http.HandleFunc(conf.Wide.Context+"/session/ws", handlerWrapper(session.WSHandler))
	http.HandleFunc(conf.Wide.Context+"/session/save", handlerWrapper(session.SaveContentHandler))

	// run
	http.HandleFunc(conf.Wide.Context+"/build", handlerWrapper(output.BuildHandler))
	http.HandleFunc(conf.Wide.Context+"/run", handlerWrapper(output.RunHandler))
	http.HandleFunc(conf.Wide.Context+"/stop", handlerWrapper(output.StopHandler))
	http.HandleFunc(conf.Wide.Context+"/go/test", handlerWrapper(output.GoTestHandler))
	http.HandleFunc(conf.Wide.Context+"/go/vet", handlerWrapper(output.GoVetHandler))
	http.HandleFunc(conf.Wide.Context+"/go/get", handlerWrapper(output.GoGetHandler))
	http.HandleFunc(conf.Wide.Context+"/go/install", handlerWrapper(output.GoInstallHandler))
	http.HandleFunc(conf.Wide.Context+"/output/ws", handlerWrapper(output.WSHandler))

	// cross-compilation
	http.HandleFunc(conf.Wide.Context+"/cross", handlerWrapper(output.CrossCompilationHandler))

	// file tree
	http.HandleFunc(conf.Wide.Context+"/files", handlerWrapper(file.GetFilesHandler))
	http.HandleFunc(conf.Wide.Context+"/file/refresh", handlerWrapper(file.RefreshDirectoryHandler))
	http.HandleFunc(conf.Wide.Context+"/file", handlerWrapper(file.GetFileHandler))
	http.HandleFunc(conf.Wide.Context+"/file/save", handlerWrapper(file.SaveFileHandler))
	http.HandleFunc(conf.Wide.Context+"/file/new", handlerWrapper(file.NewFileHandler))
	http.HandleFunc(conf.Wide.Context+"/file/remove", handlerWrapper(file.RemoveFileHandler))
	http.HandleFunc(conf.Wide.Context+"/file/rename", handlerWrapper(file.RenameFileHandler))
	http.HandleFunc(conf.Wide.Context+"/file/search/text", handlerWrapper(file.SearchTextHandler))
	http.HandleFunc(conf.Wide.Context+"/file/find/name", handlerWrapper(file.FindHandler))

	// outline
	http.HandleFunc(conf.Wide.Context+"/outline", handlerWrapper(file.GetOutlineHandler))

	// file export/import
	http.HandleFunc(conf.Wide.Context+"/file/zip/new", handlerWrapper(file.CreateZipHandler))
	http.HandleFunc(conf.Wide.Context+"/file/zip", handlerWrapper(file.GetZipHandler))
	http.HandleFunc(conf.Wide.Context+"/file/upload", handlerWrapper(file.UploadHandler))
	http.HandleFunc(conf.Wide.Context+"/file/decompress", handlerWrapper(file.DecompressHandler))

	// editor
	http.HandleFunc(conf.Wide.Context+"/editor/ws", handlerWrapper(editor.WSHandler))
	http.HandleFunc(conf.Wide.Context+"/go/fmt", handlerWrapper(editor.GoFmtHandler))
	http.HandleFunc(conf.Wide.Context+"/autocomplete", handlerWrapper(editor.AutocompleteHandler))
	http.HandleFunc(conf.Wide.Context+"/exprinfo", handlerWrapper(editor.GetExprInfoHandler))
	http.HandleFunc(conf.Wide.Context+"/find/decl", handlerWrapper(editor.FindDeclarationHandler))
	http.HandleFunc(conf.Wide.Context+"/find/usages", handlerWrapper(editor.FindUsagesHandler))

	// shell (currently disabled)
	// http.HandleFunc(conf.Wide.Context+"/shell/ws", handlerWrapper(shell.WSHandler))
	// http.HandleFunc(conf.Wide.Context+"/shell", handlerWrapper(shell.IndexHandler))

	// notification
	http.HandleFunc(conf.Wide.Context+"/notification/ws", handlerWrapper(notification.WSHandler))

	// user
	http.HandleFunc(conf.Wide.Context+"/login", handlerWrapper(session.LoginHandler))
	http.HandleFunc(conf.Wide.Context+"/logout", handlerWrapper(session.LogoutHandler))
	http.HandleFunc(conf.Wide.Context+"/signup", handlerWrapper(session.SignUpUserHandler))
	http.HandleFunc(conf.Wide.Context+"/preference", handlerWrapper(session.PreferenceHandler))

	// playground
	http.HandleFunc(conf.Wide.Context+"/playground", handlerWrapper(playground.IndexHandler))
	http.HandleFunc(conf.Wide.Context+"/playground/", handlerWrapper(playground.IndexHandler))
	http.HandleFunc(conf.Wide.Context+"/playground/ws", handlerWrapper(playground.WSHandler))
	http.HandleFunc(conf.Wide.Context+"/playground/save", handlerWrapper(playground.SaveHandler))
	http.HandleFunc(conf.Wide.Context+"/playground/short-url", handlerWrapper(playground.ShortURLHandler))
	http.HandleFunc(conf.Wide.Context+"/playground/build", handlerWrapper(playground.BuildHandler))
	http.HandleFunc(conf.Wide.Context+"/playground/run", handlerWrapper(playground.RunHandler))
	http.HandleFunc(conf.Wide.Context+"/playground/stop", handlerWrapper(playground.StopHandler))
	http.HandleFunc(conf.Wide.Context+"/playground/autocomplete", handlerWrapper(playground.AutocompleteHandler))

	// git
	http.HandleFunc(conf.Wide.Context+"/git/clone", handlerWrapper(git.CloneHandler))

	logger.Infof("Wide is running [%s]", conf.Wide.Server+conf.Wide.Context)

	// ListenAndServe blocks; it only returns on a fatal server error.
	err := http.ListenAndServe(conf.Wide.Server, nil)
	if err != nil {
		logger.Error(err)
	}
}
|
||||
|
||||
// indexHandler handles request of Wide index.
|
||||
func indexHandler(w http.ResponseWriter, r *http.Request) {
|
||||
if conf.Wide.Context+"/" != r.RequestURI {
|
||||
http.Redirect(w, r, conf.Wide.Context+"/", http.StatusFound)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
httpSession, _ := session.HTTPSession.Get(r, "wide-session")
|
||||
if httpSession.IsNew {
|
||||
http.Redirect(w, r, conf.Wide.Context+"/login", http.StatusFound)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
username := httpSession.Values["username"].(string)
|
||||
if "playground" == username { // reserved user for Playground
|
||||
http.Redirect(w, r, conf.Wide.Context+"/login", http.StatusFound)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
httpSession.Options.MaxAge = conf.Wide.HTTPSessionMaxAge
|
||||
if "" != conf.Wide.Context {
|
||||
httpSession.Options.Path = conf.Wide.Context
|
||||
}
|
||||
httpSession.Save(r, w)
|
||||
|
||||
user := conf.GetUser(username)
|
||||
if nil == user {
|
||||
logger.Warnf("Not found user [%s]", username)
|
||||
|
||||
http.Redirect(w, r, conf.Wide.Context+"/login", http.StatusFound)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
locale := user.Locale
|
||||
|
||||
wideSessions := session.WideSessions.GetByUsername(username)
|
||||
|
||||
model := map[string]interface{}{"conf": conf.Wide, "i18n": i18n.GetAll(locale), "locale": locale,
|
||||
"username": username, "sid": session.WideSessions.GenId(), "latestSessionContent": user.LatestSessionContent,
|
||||
"pathSeparator": conf.PathSeparator, "codeMirrorVer": conf.CodeMirrorVer,
|
||||
"user": user, "editorThemes": conf.GetEditorThemes(), "crossPlatforms": util.Go.GetCrossPlatforms()}
|
||||
|
||||
logger.Debugf("User [%s] has [%d] sessions", username, len(wideSessions))
|
||||
|
||||
t, err := template.ParseFiles("views/index.html")
|
||||
if nil != err {
|
||||
logger.Error(err)
|
||||
http.Error(w, err.Error(), 500)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
t.Execute(w, model)
|
||||
}
|
||||
|
||||
// handleSignal handles system signal for graceful shutdown.
|
||||
func handleSignal() {
|
||||
go func() {
|
||||
c := make(chan os.Signal)
|
||||
|
||||
signal.Notify(c, syscall.SIGINT, syscall.SIGQUIT, syscall.SIGTERM)
|
||||
s := <-c
|
||||
logger.Tracef("Got signal [%s]", s)
|
||||
|
||||
session.SaveOnlineUsers()
|
||||
logger.Tracef("Saved all online user, exit")
|
||||
|
||||
os.Exit(0)
|
||||
}()
|
||||
}
|
||||
|
||||
// serveSingle registers the handler function for the given pattern and filename.
|
||||
func serveSingle(pattern string, filename string) {
|
||||
http.HandleFunc(pattern, func(w http.ResponseWriter, r *http.Request) {
|
||||
http.ServeFile(w, r, filename)
|
||||
})
|
||||
}
|
||||
|
||||
// startHandler handles request of start page.
|
||||
func startHandler(w http.ResponseWriter, r *http.Request) {
|
||||
httpSession, _ := session.HTTPSession.Get(r, "wide-session")
|
||||
if httpSession.IsNew {
|
||||
http.Redirect(w, r, conf.Wide.Context+"/login", http.StatusFound)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
httpSession.Options.MaxAge = conf.Wide.HTTPSessionMaxAge
|
||||
if "" != conf.Wide.Context {
|
||||
httpSession.Options.Path = conf.Wide.Context
|
||||
}
|
||||
httpSession.Save(r, w)
|
||||
|
||||
username := httpSession.Values["username"].(string)
|
||||
locale := conf.GetUser(username).Locale
|
||||
userWorkspace := conf.GetUserWorkspace(username)
|
||||
|
||||
sid := r.URL.Query()["sid"][0]
|
||||
wSession := session.WideSessions.Get(sid)
|
||||
if nil == wSession {
|
||||
logger.Errorf("Session [%s] not found", sid)
|
||||
}
|
||||
|
||||
model := map[string]interface{}{"conf": conf.Wide, "i18n": i18n.GetAll(locale), "locale": locale,
|
||||
"username": username, "workspace": userWorkspace, "ver": conf.WideVersion, "sid": sid}
|
||||
|
||||
t, err := template.ParseFiles("views/start.html")
|
||||
|
||||
if nil != err {
|
||||
logger.Error(err)
|
||||
http.Error(w, err.Error(), 500)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
t.Execute(w, model)
|
||||
}
|
||||
|
||||
// keyboardShortcutsHandler handles request of keyboard shortcuts page.
|
||||
func keyboardShortcutsHandler(w http.ResponseWriter, r *http.Request) {
|
||||
httpSession, _ := session.HTTPSession.Get(r, "wide-session")
|
||||
if httpSession.IsNew {
|
||||
http.Redirect(w, r, conf.Wide.Context+"/login", http.StatusFound)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
httpSession.Options.MaxAge = conf.Wide.HTTPSessionMaxAge
|
||||
if "" != conf.Wide.Context {
|
||||
httpSession.Options.Path = conf.Wide.Context
|
||||
}
|
||||
httpSession.Save(r, w)
|
||||
|
||||
username := httpSession.Values["username"].(string)
|
||||
locale := conf.GetUser(username).Locale
|
||||
|
||||
model := map[string]interface{}{"conf": conf.Wide, "i18n": i18n.GetAll(locale), "locale": locale}
|
||||
|
||||
t, err := template.ParseFiles("views/keyboard_shortcuts.html")
|
||||
|
||||
if nil != err {
|
||||
logger.Error(err)
|
||||
http.Error(w, err.Error(), 500)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
t.Execute(w, model)
|
||||
}
|
||||
|
||||
// aboutHandle handles request of about page.
|
||||
func aboutHandler(w http.ResponseWriter, r *http.Request) {
|
||||
httpSession, _ := session.HTTPSession.Get(r, "wide-session")
|
||||
if httpSession.IsNew {
|
||||
http.Redirect(w, r, conf.Wide.Context+"/login", http.StatusFound)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
httpSession.Options.MaxAge = conf.Wide.HTTPSessionMaxAge
|
||||
if "" != conf.Wide.Context {
|
||||
httpSession.Options.Path = conf.Wide.Context
|
||||
}
|
||||
httpSession.Save(r, w)
|
||||
|
||||
username := httpSession.Values["username"].(string)
|
||||
locale := conf.GetUser(username).Locale
|
||||
|
||||
model := map[string]interface{}{"conf": conf.Wide, "i18n": i18n.GetAll(locale), "locale": locale,
|
||||
"ver": conf.WideVersion, "goos": runtime.GOOS, "goarch": runtime.GOARCH, "gover": runtime.Version()}
|
||||
|
||||
t, err := template.ParseFiles("views/about.html")
|
||||
|
||||
if nil != err {
|
||||
logger.Error(err)
|
||||
http.Error(w, err.Error(), 500)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
t.Execute(w, model)
|
||||
}
|
||||
|
||||
// handlerWrapper wraps the HTTP Handler for some common processes.
|
||||
//
|
||||
// 1. panic recover
|
||||
// 2. request stopwatch
|
||||
// 3. i18n
|
||||
func handlerWrapper(f func(w http.ResponseWriter, r *http.Request)) func(w http.ResponseWriter, r *http.Request) {
|
||||
handler := panicRecover(f)
|
||||
handler = stopwatch(handler)
|
||||
handler = i18nLoad(handler)
|
||||
|
||||
return handler
|
||||
}
|
||||
|
||||
// handlerGzWrapper wraps the HTTP Handler for some common processes.
|
||||
//
|
||||
// 1. panic recover
|
||||
// 2. gzip response
|
||||
// 3. request stopwatch
|
||||
// 4. i18n
|
||||
func handlerGzWrapper(f func(w http.ResponseWriter, r *http.Request)) func(w http.ResponseWriter, r *http.Request) {
|
||||
handler := panicRecover(f)
|
||||
handler = gzipWrapper(handler)
|
||||
handler = stopwatch(handler)
|
||||
handler = i18nLoad(handler)
|
||||
|
||||
return handler
|
||||
}
|
||||
|
||||
// gzipWrapper wraps the process with response gzip.
|
||||
func gzipWrapper(f func(http.ResponseWriter, *http.Request)) func(w http.ResponseWriter, r *http.Request) {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
if !strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
|
||||
f(w, r)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Encoding", "gzip")
|
||||
gz := gzip.NewWriter(w)
|
||||
defer gz.Close()
|
||||
gzr := gzipResponseWriter{Writer: gz, ResponseWriter: w}
|
||||
|
||||
f(gzr, r)
|
||||
}
|
||||
}
|
||||
|
||||
// i18nLoad wraps the i18n process.
|
||||
func i18nLoad(handler func(w http.ResponseWriter, r *http.Request)) func(w http.ResponseWriter, r *http.Request) {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
i18n.Load()
|
||||
|
||||
handler(w, r)
|
||||
}
|
||||
}
|
||||
|
||||
// stopwatch wraps the request stopwatch process.
|
||||
func stopwatch(handler func(w http.ResponseWriter, r *http.Request)) func(w http.ResponseWriter, r *http.Request) {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
start := time.Now()
|
||||
|
||||
defer func() {
|
||||
logger.Tracef("[%s, %s, %s]", r.Method, r.RequestURI, time.Since(start))
|
||||
}()
|
||||
|
||||
handler(w, r)
|
||||
}
|
||||
}
|
||||
|
||||
// panicRecover wraps the panic recover process.
|
||||
func panicRecover(handler func(w http.ResponseWriter, r *http.Request)) func(w http.ResponseWriter, r *http.Request) {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
defer util.Recover()
|
||||
|
||||
handler(w, r)
|
||||
}
|
||||
}
|
||||
|
||||
// initMime initializes mime types.
|
||||
//
|
||||
// We can't get the mime types on some OS (such as Windows XP) by default, so initializes them here.
|
||||
func initMime() {
|
||||
mime.AddExtensionType(".css", "text/css")
|
||||
mime.AddExtensionType(".js", "application/x-javascript")
|
||||
mime.AddExtensionType(".json", "application/json")
|
||||
}
|
||||
|
||||
// gzipResponseWriter represents a gzip response writer.
|
||||
type gzipResponseWriter struct {
|
||||
io.Writer
|
||||
http.ResponseWriter
|
||||
}
|
||||
|
||||
// Write writes response with appropriate 'Content-Type'.
|
||||
func (w gzipResponseWriter) Write(b []byte) (int, error) {
|
||||
if "" == w.Header().Get("Content-Type") {
|
||||
// If no content type, apply sniffing algorithm to un-gzipped body.
|
||||
w.Header().Set("Content-Type", http.DetectContentType(b))
|
||||
}
|
||||
|
||||
return w.Writer.Write(b)
|
||||
}
|
|
@ -0,0 +1,33 @@
|
|||
{
|
||||
"name": "wide",
|
||||
"version": "1.4.0",
|
||||
"description": "A Web-based IDE for Teams using Go programming language/Golang.",
|
||||
"homepage": "https://wide.b3log.org",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/b3log/wide.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/b3log/wide/issues"
|
||||
},
|
||||
"license": "Apache License",
|
||||
"private": true,
|
||||
"author": "Daniel <d@b3log.org> (http://88250.b3log.org) & Vanessa <v@b3log.org> (http://vanessa.b3log.org)",
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "Daniel",
|
||||
"email": "d@b3log.org"
|
||||
},
|
||||
{
|
||||
"name": "Vanessa",
|
||||
"email": "v@b3log.org"
|
||||
}
|
||||
],
|
||||
"devDependencies": {
|
||||
"gulp": "^3.9.1",
|
||||
"gulp-concat": "^2.6.1",
|
||||
"gulp-minify-css": "^1.2.4",
|
||||
"gulp-sourcemaps": "^2.6.0",
|
||||
"gulp-uglify": "^2.1.2"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,80 @@
|
|||
#!/bin/bash

# Wide package tool.
#
# Builds Wide plus its helper tools (gotools, gocode) for each supported
# GOOS/GOARCH pair and drops one archive per pair into ${target}
# (zip on Windows, tar.gz elsewhere).
#
# Command:
#  ./pkg.sh ${version} ${target}
# Example:
#  ./pkg.sh 1.0.0 /home/daniel/1.0.0/

ver=$1
target=$2
list="conf doc i18n static views README.md TERMS.md LICENSE"

mkdir -p "${target}"

echo version=${ver}
echo target=${target}

# pkg GOOS GOARCH — cross-compile and archive one platform.
pkg() {
    export GOOS=$1
    export GOARCH=$2

    if [ "${GOOS}" = "windows" ]; then
        ext=zip
    else
        ext=tar.gz
    fi
    echo wide-${ver}-${GOOS}-${GOARCH}.${ext}

    go build
    go build github.com/visualfc/gotools
    go build github.com/nsf/gocode

    if [ "${GOOS}" = "windows" ]; then
        zip -r -q "${target}/wide-${ver}-${GOOS}-${GOARCH}.zip" ${list} gotools.exe gocode.exe wide.exe --exclude=conf/*.go --exclude=i18n/*.go
        rm -f wide.exe gotools.exe gocode.exe
    else
        tar zcf "${target}/wide-${ver}-${GOOS}-${GOARCH}.tar.gz" ${list} gotools gocode wide --exclude-vcs --exclude='conf/*.go' --exclude='i18n/*.go'
        rm -f wide gotools gocode
    fi
}

## darwin
pkg darwin amd64
pkg darwin 386

## linux
pkg linux amd64
pkg linux 386

## windows
pkg windows amd64
pkg windows 386
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,27 @@
|
|||
Copyright (c) 2013 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -0,0 +1,17 @@
|
|||
This tool updates your Go import lines, adding missing ones and
|
||||
removing unreferenced ones.
|
||||
|
||||
$ go get golang.org/x/tools/cmd/goimports
|
||||
|
||||
Note the new location. This project has moved to the official
|
||||
go.tools repo. Pull requests here will no longer be accepted.
|
||||
Please use the Go process: http://golang.org/doc/contribute.html
|
||||
|
||||
It acts the same as gofmt (same flags, etc) but in addition to code
|
||||
formatting, also fixes imports.
|
||||
|
||||
See usage and editor integration notes, now moved elsewhere:
|
||||
|
||||
http://godoc.org/golang.org/x/tools/cmd/goimports
|
||||
|
||||
Happy hacking!
|
|
@ -0,0 +1,195 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/scanner"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/imports"
|
||||
)
|
||||
|
||||
var (
	// main operation modes
	list   = flag.Bool("l", false, "list files whose formatting differs from goimport's")
	write  = flag.Bool("w", false, "write result to (source) file instead of stdout")
	doDiff = flag.Bool("d", false, "display diffs instead of rewriting files")

	// options is the shared import-processing configuration; processFile
	// copies it (forcing Fragment) when reading from stdin.
	options = &imports.Options{
		TabWidth:  8,
		TabIndent: true,
		Comments:  true,
		Fragment:  true,
	}
	// exitCode is the process exit status; report() raises it to 2.
	exitCode = 0
)
|
||||
|
||||
// init registers the -e flag, which makes goimports report all errors
// instead of only the first 10 on different lines.
func init() {
	flag.BoolVar(&options.AllErrors, "e", false, "report all errors (not just the first 10 on different lines)")
}
|
||||
|
||||
func report(err error) {
|
||||
scanner.PrintError(os.Stderr, err)
|
||||
exitCode = 2
|
||||
}
|
||||
|
||||
// usage prints the command-line help to stderr and exits with status 2.
func usage() {
	fmt.Fprintln(os.Stderr, "usage: goimports [flags] [path ...]")
	flag.PrintDefaults()
	os.Exit(2)
}
|
||||
|
||||
func isGoFile(f os.FileInfo) bool {
|
||||
// ignore non-Go files
|
||||
name := f.Name()
|
||||
return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go")
|
||||
}
|
||||
|
||||
// processFile fixes the imports of a single Go source file.
//
// The source is read from in when non-nil, otherwise from filename. The
// result is dispatched according to the -l/-w/-d flags; with none of them
// set the processed source is written to out. stdin indicates the source
// came from standard input, which forces Fragment mode so incomplete
// snippets (e.g. missing package clause) can still be processed.
func processFile(filename string, in io.Reader, out io.Writer, stdin bool) error {
	opt := options
	if stdin {
		// Work on a copy so the shared package-level options are not
		// mutated.
		nopt := *options
		nopt.Fragment = true
		opt = &nopt
	}

	if in == nil {
		f, err := os.Open(filename)
		if err != nil {
			return err
		}
		defer f.Close()
		in = f
	}

	src, err := ioutil.ReadAll(in)
	if err != nil {
		return err
	}

	res, err := imports.Process(filename, src, opt)
	if err != nil {
		return err
	}

	if !bytes.Equal(src, res) {
		// formatting has changed
		if *list {
			// -l: print the filename only
			fmt.Fprintln(out, filename)
		}
		if *write {
			// -w: rewrite the file in place
			err = ioutil.WriteFile(filename, res, 0)
			if err != nil {
				return err
			}
		}
		if *doDiff {
			// -d: show a unified diff instead of the result
			data, err := diff(src, res)
			if err != nil {
				return fmt.Errorf("computing diff: %s", err)
			}
			fmt.Printf("diff %s gofmt/%s\n", filename, filename)
			out.Write(data)
		}
	}

	// Default mode (no flags): emit the processed source.
	if !*list && !*write && !*doDiff {
		_, err = out.Write(res)
	}

	return err
}
|
||||
|
||||
func visitFile(path string, f os.FileInfo, err error) error {
|
||||
if err == nil && isGoFile(f) {
|
||||
err = processFile(path, nil, os.Stdout, false)
|
||||
}
|
||||
if err != nil {
|
||||
report(err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// walkDir runs processFile on every Go file under path; per-file errors
// are handled and reported inside visitFile.
func walkDir(path string) {
	filepath.Walk(path, visitFile)
}
|
||||
|
||||
// main delegates all work to gofmtMain and then exits with the accumulated
// exit code.
func main() {
	runtime.GOMAXPROCS(runtime.NumCPU())

	// call gofmtMain in a separate function
	// so that it can use defer and have them
	// run before the exit.
	gofmtMain()
	os.Exit(exitCode)
}
|
||||
|
||||
// gofmtMain implements the goimports command: it validates flags, then
// processes stdin (when no arguments are given) or each named file or
// directory argument in turn.
func gofmtMain() {
	flag.Usage = usage
	flag.Parse()

	if options.TabWidth < 0 {
		fmt.Fprintf(os.Stderr, "negative tabwidth %d\n", options.TabWidth)
		exitCode = 2
		return
	}

	// No arguments: filter stdin to stdout (stdin=true enables Fragment
	// mode in processFile).
	if flag.NArg() == 0 {
		if err := processFile("<standard input>", os.Stdin, os.Stdout, true); err != nil {
			report(err)
		}
		return
	}

	for i := 0; i < flag.NArg(); i++ {
		path := flag.Arg(i)
		switch dir, err := os.Stat(path); {
		case err != nil:
			report(err)
		case dir.IsDir():
			// recurse into directories
			walkDir(path)
		default:
			if err := processFile(path, nil, os.Stdout, false); err != nil {
				report(err)
			}
		}
	}
}
|
||||
|
||||
// diff returns the unified diff between b1 and b2 by writing both to
// temporary files and shelling out to the system "diff" tool.
func diff(b1, b2 []byte) ([]byte, error) {
	f1, err := ioutil.TempFile("", "gofmt")
	if err != nil {
		return nil, err
	}
	defer os.Remove(f1.Name())
	defer f1.Close()

	f2, err := ioutil.TempFile("", "gofmt")
	if err != nil {
		return nil, err
	}
	defer os.Remove(f2.Name())
	defer f2.Close()

	f1.Write(b1)
	f2.Write(b2)

	out, err := exec.Command("diff", "-u", f1.Name(), f2.Name()).CombinedOutput()
	if len(out) > 0 {
		// diff exits with a non-zero status when the files don't match.
		// Ignore that failure as long as we get output.
		err = nil
	}

	return out, err
}
|
|
@ -0,0 +1,19 @@
|
|||
Copyright (C) 2010 nsf <no.smile.face@gmail.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
|
@ -0,0 +1,195 @@
|
|||
## An autocompletion daemon for the Go programming language
|
||||
|
||||
Gocode is a helper tool which is intended to be integrated with your source code editor, like vim, neovim and emacs. It provides several advanced capabilities, which currently includes:
|
||||
|
||||
- Context-sensitive autocompletion
|
||||
|
||||
It is called *daemon*, because it uses client/server architecture for caching purposes. In particular, it makes autocompletions very fast. Typical autocompletion time with warm cache is 30ms, which is barely noticeable.
|
||||
|
||||
Also watch the [demo screencast](http://nosmileface.ru/images/gocode-demo.swf).
|
||||
|
||||
![Gocode in vim](http://nosmileface.ru/images/gocode-screenshot.png)
|
||||
|
||||
![Gocode in emacs](http://nosmileface.ru/images/emacs-gocode.png)
|
||||
|
||||
### Setup
|
||||
|
||||
1. You should have a correctly installed Go compiler environment and your personal workspace ($GOPATH). If you have no idea what **$GOPATH** is, take a look [here](http://golang.org/doc/code.html). Please make sure that your **$GOPATH/bin** is available in your **$PATH**. This is important, because most editors assume that **gocode** binary is available in one of the directories, specified by your **$PATH** environment variable. Otherwise manually copy the **gocode** binary from **$GOPATH/bin** to a location which is part of your **$PATH** after getting it in step 2.
|
||||
|
||||
Do these steps only if you understand why you need to do them:
|
||||
|
||||
`export GOPATH=$HOME/goprojects`
|
||||
|
||||
`export PATH=$PATH:$GOPATH/bin`
|
||||
|
||||
2. Then you need to get the appropriate version of the gocode, for 6g/8g/5g compiler you can do this:
|
||||
|
||||
`go get -u github.com/nsf/gocode` (-u flag for "update")
|
||||
|
||||
Windows users should consider doing this instead:
|
||||
|
||||
`go get -u -ldflags -H=windowsgui github.com/nsf/gocode`
|
||||
|
||||
That way on the Windows OS gocode will be built as a GUI application and doing so solves hanging window issues with some of the editors.
|
||||
|
||||
3. Next steps are editor specific. See below.
|
||||
|
||||
### Vim setup
|
||||
|
||||
#### Vim manual installation
|
||||
|
||||
Note: As of go 1.5 there is no $GOROOT/misc/vim script. Suggested installation is via [vim-go plugin](https://github.com/fatih/vim-go).
|
||||
|
||||
In order to install vim scripts, you need to fulfill the following steps:
|
||||
|
||||
1. Install official Go vim scripts from **$GOROOT/misc/vim**. If you have done that already, proceed to step 2.
|
||||
|
||||
2. Install gocode vim scripts. Usually it's enough to do the following:
|
||||
|
||||
2.1. `vim/update.sh`
|
||||
|
||||
**update.sh** script does the following:
|
||||
|
||||
#!/bin/sh
|
||||
mkdir -p "$HOME/.vim/autoload"
|
||||
mkdir -p "$HOME/.vim/ftplugin/go"
|
||||
cp "${0%/*}/autoload/gocomplete.vim" "$HOME/.vim/autoload"
|
||||
cp "${0%/*}/ftplugin/go/gocomplete.vim" "$HOME/.vim/ftplugin/go"
|
||||
|
||||
2.2. Alternatively, you can create symlinks using symlink.sh script in order to avoid running update.sh after every gocode update.
|
||||
|
||||
**symlink.sh** script does the following:
|
||||
|
||||
#!/bin/sh
|
||||
cd "${0%/*}"
|
||||
ROOTDIR=`pwd`
|
||||
mkdir -p "$HOME/.vim/autoload"
|
||||
mkdir -p "$HOME/.vim/ftplugin/go"
|
||||
ln -s "$ROOTDIR/autoload/gocomplete.vim" "$HOME/.vim/autoload/"
|
||||
ln -s "$ROOTDIR/ftplugin/go/gocomplete.vim" "$HOME/.vim/ftplugin/go/"
|
||||
|
||||
3. Make sure vim has filetype plugin enabled. Simply add that to your **.vimrc**:
|
||||
|
||||
`filetype plugin on`
|
||||
|
||||
4. Autocompletion should work now. Use `<C-x><C-o>` for autocompletion (omnifunc autocompletion).
|
||||
|
||||
#### Using Vundle in Vim
|
||||
|
||||
Add the following line to your **.vimrc**:
|
||||
|
||||
`Plugin 'nsf/gocode', {'rtp': 'vim/'}`
|
||||
|
||||
And then update your packages by running `:PluginInstall`.
|
||||
|
||||
#### Using vim-plug in Vim
|
||||
|
||||
Add the following line to your **.vimrc**:
|
||||
|
||||
`Plug 'nsf/gocode', { 'rtp': 'vim', 'do': '~/.vim/plugged/gocode/vim/symlink.sh' }`
|
||||
|
||||
And then update your packages by running `:PlugInstall`.
|
||||
|
||||
#### Other
|
||||
|
||||
Alternatively take a look at the vundle/pathogen friendly repo: https://github.com/Blackrush/vim-gocode.
|
||||
|
||||
### Neovim setup
|
||||
#### Neovim manual installation
|
||||
|
||||
Neovim users should also follow `Vim manual installation`, except that you should goto `gocode/nvim` in step 2, and remember that, the Neovim configuration file is `~/.config/nvim/init.vim`.
|
||||
|
||||
#### Using Vundle in Neovim
|
||||
|
||||
Add the following line to your **init.vim**:
|
||||
|
||||
`Plugin 'nsf/gocode', {'rtp': 'nvim/'}`
|
||||
|
||||
And then update your packages by running `:PluginInstall`.
|
||||
|
||||
#### Using vim-plug in Neovim
|
||||
|
||||
Add the following line to your **init.vim**:
|
||||
|
||||
`Plug 'nsf/gocode', { 'rtp': 'nvim', 'do': '~/.config/nvim/plugged/gocode/nvim/symlink.sh' }`
|
||||
|
||||
And then update your packages by running `:PlugInstall`.
|
||||
|
||||
### Emacs setup
|
||||
|
||||
In order to install emacs script, you need to fulfill the following steps:
|
||||
|
||||
1. Install [auto-complete-mode](http://www.emacswiki.org/emacs/AutoComplete)
|
||||
|
||||
2. Copy **emacs/go-autocomplete.el** file from the gocode source distribution to a directory which is in your 'load-path' in emacs.
|
||||
|
||||
3. Add these lines to your **.emacs**:
|
||||
|
||||
(require 'go-autocomplete)
|
||||
(require 'auto-complete-config)
|
||||
(ac-config-default)
|
||||
|
||||
Also, there is an alternative plugin for emacs using company-mode. See `emacs-company/README` for installation instructions.
|
||||
|
||||
If you're a macOS user, you may find this script useful: https://github.com/purcell/exec-path-from-shell. It helps you set up the environment variables that Go and gocode require. By default it pulls in PATH, but don't forget to add GOPATH as well, e.g.:
|
||||
|
||||
```
|
||||
(when (memq window-system '(mac ns))
|
||||
(exec-path-from-shell-initialize)
|
||||
(exec-path-from-shell-copy-env "GOPATH"))
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
You can change all available options using `gocode set` command. The config file uses json format and is usually stored somewhere in **~/.config/gocode** directory. On windows it's stored in the appropriate AppData folder. It's suggested to avoid modifying config file manually, do that using the `gocode set` command.
|
||||
|
||||
`gocode set` lists all options and their values.
|
||||
|
||||
`gocode set <option>` shows the value of that *option*.
|
||||
|
||||
`gocode set <option> <value>` sets the new *value* for that *option*.
|
||||
|
||||
- *propose-builtins*
|
||||
|
||||
A boolean option. If **true**, gocode will add built-in types, functions and constants to an autocompletion proposals. Default: **false**.
|
||||
|
||||
- *lib-path*
|
||||
|
||||
A string option. Allows you to add search paths for packages. By default, gocode only searches **$GOPATH/pkg/$GOOS_$GOARCH** and **$GOROOT/pkg/$GOOS_$GOARCH** in terms of previously existed environment variables. Also you can specify multiple paths using ':' (colon) as a separator (on Windows use semicolon ';'). The paths specified by *lib-path* are prepended to the default ones.
|
||||
|
||||
- *autobuild*
|
||||
|
||||
A boolean option. If **true**, gocode will try to automatically build out-of-date packages when their source files are modified, in order to obtain the freshest autocomplete results for them. This feature is experimental. Default: **false**.
|
||||
|
||||
- *force-debug-output*
|
||||
|
||||
A string option. If is not empty, gocode will forcefully redirect the logging into that file. Also forces enabling of the debug mode on the server side. Default: "" (empty).
|
||||
|
||||
- *package-lookup-mode*
|
||||
|
||||
A string option. If **go**, use standard Go package lookup rules. If **gb**, use gb-specific lookup rules. See https://github.com/constabulary/gb for details. Default: **go**.
|
||||
|
||||
- *close-timeout*
|
||||
|
||||
An integer option. If there have been no completion requests after this number of seconds, the gocode process will terminate. Defaults to 1800 (30 minutes).
|
||||
|
||||
### Debugging
|
||||
|
||||
If something went wrong, the first thing you may want to do is manually start the gocode daemon with a debug mode enabled and in a separate terminal window. It will show you all the stack traces, panics if any and additional info about autocompletion requests. Shutdown the daemon if it was already started and run a new one explicitly with a debug mode enabled:
|
||||
|
||||
`gocode close`
|
||||
|
||||
`gocode -s -debug`
|
||||
|
||||
Please, report bugs, feature suggestions and other rants to the [github issue tracker](http://github.com/nsf/gocode/issues) of this project.
|
||||
|
||||
### Developing
|
||||
|
||||
There is [Guide for IDE/editor plugin developers](docs/IDE_integration.md).
|
||||
|
||||
If you have troubles, please, contact me and I will try to do my best answering your questions. You can contact me via <a href="mailto:no.smile.face@gmail.com">email</a>. Or for short question find me on IRC: #go-nuts @ freenode.
|
||||
|
||||
### Misc
|
||||
|
||||
- It's a good idea to use the latest git version always. I'm trying to keep it in a working state.
|
||||
- Use `go install` (not `go build`) for building a local source tree. The objects in `pkg/` are needed for Gocode to work.
|
|
@ -0,0 +1,689 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// out_buffers
|
||||
//
|
||||
// Temporary structure for writing autocomplete response.
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// candidate is a single autocompletion proposal returned to the client.
// fields must be exported for RPC
type candidate struct {
	Name  string     // proposed identifier
	Type  string     // pretty-printed type of the declaration (empty for import proposals)
	Class decl_class // declaration class (const/var/type/func/package/...)
}
|
||||
|
||||
// out_buffers is a temporary structure used while building an
// autocompletion response (see the section comment above).
type out_buffers struct {
	tmpbuf     *bytes.Buffer          // scratch buffer for pretty-printing candidate types
	candidates []candidate            // accumulated proposals
	ctx        *auto_complete_context // owning autocompletion context
	tmpns      map[string]bool        // temp namespace used by append_embedded to dedupe names
	ignorecase bool                   // case-insensitive prefix matching (fallback mode)
}
|
||||
|
||||
func new_out_buffers(ctx *auto_complete_context) *out_buffers {
|
||||
b := new(out_buffers)
|
||||
b.tmpbuf = bytes.NewBuffer(make([]byte, 0, 1024))
|
||||
b.candidates = make([]candidate, 0, 64)
|
||||
b.ctx = ctx
|
||||
return b
|
||||
}
|
||||
|
||||
// Len implements sort.Interface over the accumulated candidates.
func (b *out_buffers) Len() int {
	return len(b.candidates)
}
|
||||
|
||||
func (b *out_buffers) Less(i, j int) bool {
|
||||
x := b.candidates[i]
|
||||
y := b.candidates[j]
|
||||
if x.Class == y.Class {
|
||||
return x.Name < y.Name
|
||||
}
|
||||
return x.Class < y.Class
|
||||
}
|
||||
|
||||
// Swap implements sort.Interface.
func (b *out_buffers) Swap(i, j int) {
	b.candidates[i], b.candidates[j] = b.candidates[j], b.candidates[i]
}
|
||||
|
||||
// append_decl appends the declaration `decl` under the name `name` to
// the candidate list, unless one of the filters rejects it. `p` is the
// partial identifier typed so far; `class` restricts proposals to a
// single declaration class (decl_invalid means "any class, prefix-match
// against p instead").
func (b *out_buffers) append_decl(p, name string, decl *decl, class decl_class) {
	// Any one of these conditions rejects the candidate:
	// c1: builtins are hidden unless ProposeBuiltins is set ("Error" is always shown)
	// c2: an explicit class filter was requested and this decl is of another class
	// c3: no class filter, but the name doesn't match the typed prefix
	// c4: the decl declines to be proposed (see decl.matches)
	// c5: the decl's type expression contains an ast.BadExpr somewhere
	c1 := !g_config.ProposeBuiltins && decl.scope == g_universe_scope && decl.name != "Error"
	c2 := class != decl_invalid && decl.class != class
	c3 := class == decl_invalid && !has_prefix(name, p, b.ignorecase)
	c4 := !decl.matches()
	c5 := !check_type_expr(decl.typ)

	if c1 || c2 || c3 || c4 || c5 {
		return
	}

	// Pretty-print the type into the scratch buffer, then reset it for
	// the next candidate.
	decl.pretty_print_type(b.tmpbuf)
	b.candidates = append(b.candidates, candidate{
		Name:  name,
		Type:  b.tmpbuf.String(),
		Class: decl.class,
	})
	b.tmpbuf.Reset()
}
|
||||
|
||||
// append_embedded proposes the children of all types embedded in
// `decl`, recursively, skipping names that are shadowed by the decl's
// own children. `p` and `class` have the same meaning as in
// append_decl. The tmpns set tracks names already proposed across the
// whole embedded-type traversal.
func (b *out_buffers) append_embedded(p string, decl *decl, class decl_class) {
	if decl.embedded == nil {
		return
	}

	first_level := false
	if b.tmpns == nil {
		// first level, create tmp namespace
		b.tmpns = make(map[string]bool)
		first_level = true

		// add all children of the current decl to the namespace
		for _, c := range decl.children {
			b.tmpns[c.name] = true
		}
	}

	for _, emb := range decl.embedded {
		typedecl := type_to_decl(emb, decl.scope)
		if typedecl == nil {
			continue
		}

		// prevent infinite recursion here
		if typedecl.flags&decl_visited != 0 {
			continue
		}
		typedecl.flags |= decl_visited
		// NOTE: defer inside a loop is deliberate — the visited flag
		// must stay set while this call's whole subtree is traversed,
		// and is cleared only when this call returns.
		defer typedecl.clear_visited()

		for _, c := range typedecl.children {
			// names already proposed (or shadowed) are skipped
			if _, has := b.tmpns[c.name]; has {
				continue
			}
			b.append_decl(p, c.name, c, class)
			b.tmpns[c.name] = true
		}
		b.append_embedded(p, typedecl, class)
	}

	if first_level {
		// remove tmp namespace
		b.tmpns = nil
	}
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// auto_complete_context
|
||||
//
|
||||
// Context that holds cache structures for autocompletion needs. It
|
||||
// includes cache for packages and for main package files.
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
type auto_complete_context struct {
	current *auto_complete_file // currently edited file
	others  []*decl_file_cache  // other files of the current package
	pkg     *scope              // merged package-level scope (built by merge_decls)

	pcache    package_cache // packages cache
	declcache *decl_cache   // top-level declarations cache
}
|
||||
|
||||
func new_auto_complete_context(pcache package_cache, declcache *decl_cache) *auto_complete_context {
|
||||
c := new(auto_complete_context)
|
||||
c.current = new_auto_complete_file("", declcache.context)
|
||||
c.pcache = pcache
|
||||
c.declcache = declcache
|
||||
return c
|
||||
}
|
||||
|
||||
// update_caches refreshes the package cache entries and the declaration
// caches of all sibling files of the current package, rebinds imports
// in every file scope, and finally rebuilds the merged package scope.
func (c *auto_complete_context) update_caches() {
	// temporary map for packages that we need to check for a cache expiration
	// map is used as a set of unique items to prevent double checks
	ps := make(map[string]*package_file_cache)

	// collect import information from all of the files
	c.pcache.append_packages(ps, c.current.packages)
	c.others = get_other_package_files(c.current.name, c.current.package_name, c.declcache)
	for _, other := range c.others {
		c.pcache.append_packages(ps, other.packages)
	}

	// refresh all collected packages concurrently
	update_packages(ps)

	// fix imports for all files
	fixup_packages(c.current.filescope, c.current.packages, c.pcache)
	for _, f := range c.others {
		fixup_packages(f.filescope, f.packages, c.pcache)
	}

	// At this point we have collected all top level declarations, now we need to
	// merge them in the common package block.
	c.merge_decls()
}
|
||||
|
||||
// merge_decls rebuilds c.pkg: a fresh package scope (parented on the
// universe scope) containing the top-level declarations of the current
// file, its sibling files, and all dot-imported packages.
func (c *auto_complete_context) merge_decls() {
	c.pkg = new_scope(g_universe_scope)
	merge_decls(c.current.filescope, c.pkg, c.current.decls)
	merge_decls_from_packages(c.pkg, c.current.packages, c.pcache)
	for _, f := range c.others {
		merge_decls(f.filescope, c.pkg, f.decls)
		merge_decls_from_packages(c.pkg, f.packages, c.pcache)
	}
}
|
||||
|
||||
// make_decl_set flattens `scope` and all of its parent scopes into a
// single name -> decl map; entries from inner scopes shadow outer ones.
func (c *auto_complete_context) make_decl_set(scope *scope) map[string]*decl {
	// size hint only; the package scope is usually the largest contributor
	set := make(map[string]*decl, len(c.pkg.entities)*2)
	make_decl_set_recursive(set, scope)
	return set
}
|
||||
|
||||
func (c *auto_complete_context) get_candidates_from_set(set map[string]*decl, partial string, class decl_class, b *out_buffers) {
|
||||
for key, value := range set {
|
||||
if value == nil {
|
||||
continue
|
||||
}
|
||||
value.infer_type()
|
||||
b.append_decl(partial, key, value, class)
|
||||
}
|
||||
}
|
||||
|
||||
// get_candidates_from_decl proposes members of the declaration under
// the cursor (e.g. after "pkg." or "expr."): its direct children, the
// variable-class children of its underlying struct/interface type, and
// the members of its embedded types.
func (c *auto_complete_context) get_candidates_from_decl(cc cursor_context, class decl_class, b *out_buffers) {
	// propose all children of a subject declaration and
	for _, decl := range cc.decl.children {
		// only exported names are visible when completing through a package
		if cc.decl.class == decl_package && !ast.IsExported(decl.name) {
			continue
		}
		if cc.struct_field {
			// if we're autocompleting struct field init, skip all methods
			if _, ok := decl.typ.(*ast.FuncType); ok {
				continue
			}
		}
		b.append_decl(cc.partial, decl.name, decl, class)
	}
	// propose all children of an underlying struct/interface type
	adecl := advance_to_struct_or_interface(cc.decl)
	if adecl != nil && adecl != cc.decl {
		for _, decl := range adecl.children {
			if decl.class == decl_var {
				b.append_decl(cc.partial, decl.name, decl, class)
			}
		}
	}
	// propose all children of its embedded types
	b.append_embedded(cc.partial, cc.decl, class)
}
|
||||
|
||||
// get_import_candidates proposes import paths matching `partial` by
// scanning every known package directory for compiled (*.a) packages.
// A set is used so the same import path found under several roots is
// proposed only once.
func (c *auto_complete_context) get_import_candidates(partial string, b *out_buffers) {
	pkgdirs := g_daemon.context.pkg_dirs()
	resultSet := map[string]struct{}{}
	for _, pkgdir := range pkgdirs {
		// convert srcpath to pkgpath and get candidates
		get_import_candidates_dir(pkgdir, filepath.FromSlash(partial), b.ignorecase, resultSet)
	}
	for k := range resultSet {
		b.candidates = append(b.candidates, candidate{Name: k, Class: decl_import})
	}
}
|
||||
|
||||
// get_import_candidates_dir recursively walks `root`, collecting import
// paths (relative to root, slash-separated, vendor prefix stripped) of
// compiled packages (*.a files) that match the typed prefix `partial`.
// Matches are added to the set `r`.
func get_import_candidates_dir(root, partial string, ignorecase bool, r map[string]struct{}) {
	var fpath string
	var match bool
	if strings.HasSuffix(partial, "/") {
		// the user finished a path component: list that directory itself
		fpath = filepath.Join(root, partial)
	} else {
		// completing the last path component: list its parent directory
		// and prefix-match entries against partial
		fpath = filepath.Join(root, filepath.Dir(partial))
		match = true
	}
	fi := readdir(fpath)
	for i := range fi {
		name := fi[i].Name()
		rel, err := filepath.Rel(root, filepath.Join(fpath, name))
		if err != nil {
			panic(err)
		}
		if match && !has_prefix(rel, partial, ignorecase) {
			continue
		} else if fi[i].IsDir() {
			get_import_candidates_dir(root, rel+string(filepath.Separator), ignorecase, r)
		} else {
			ext := filepath.Ext(name)
			if ext != ".a" {
				continue
			} else {
				// strip the ".a" suffix (2 bytes) to obtain the import path
				rel = rel[0 : len(rel)-2]
			}
			r[vendorlessImportPath(filepath.ToSlash(rel))] = struct{}{}
		}
	}
}
|
||||
|
||||
// apropos is the main autocompletion entry point. Given the current
// file buffer and a byte offset of the cursor, it returns the list of
// completion candidates and the length of the already-typed partial
// identifier that should be replaced (if any). Returns (nil, 0) when
// nothing can be proposed.
func (c *auto_complete_context) apropos(file []byte, filename string, cursor int) ([]candidate, int) {
	c.current.cursor = cursor
	c.current.name = filename

	// Update caches and parse the current file.
	// This process is quite complicated, because I was trying to design it in a
	// concurrent fashion. Apparently I'm not really good at that. Hopefully
	// will be better in future.

	// Ugly hack, but it actually may help in some cases. Insert a
	// semicolon right at the cursor location.
	filesemi := make([]byte, len(file)+1)
	copy(filesemi, file[:cursor])
	filesemi[cursor] = ';'
	copy(filesemi[cursor+1:], file[cursor:])

	// Does full processing of the currently edited file (top-level declarations plus
	// active function).
	c.current.process_data(filesemi)

	// Updates cache of other files and packages. See the function for details of
	// the process. At the end merges all the top-level declarations into the package
	// block.
	c.update_caches()

	// And we're ready to Go. ;)

	b := new_out_buffers(c)

	partial := 0
	cc, ok := c.deduce_cursor_context(file, cursor)
	if !ok {
		// Couldn't deduce the cursor context; as a last resort, try to
		// resolve the identifier as a known-but-unimported package.
		var d *decl
		if ident, ok := cc.expr.(*ast.Ident); ok && g_config.UnimportedPackages {
			d = resolveKnownPackageIdent(ident.Name, c.current.name, c.current.context)
		}
		if d == nil {
			return nil, 0
		}
		cc.decl = d
	}

	// A declaration keyword before the cursor restricts proposals to a
	// single declaration class.
	class := decl_invalid
	switch cc.partial {
	case "const":
		class = decl_const
	case "var":
		class = decl_var
	case "type":
		class = decl_type
	case "func":
		class = decl_func
	case "package":
		class = decl_package
	}

	if cc.decl_import {
		c.get_import_candidates(cc.partial, b)
		if cc.partial != "" && len(b.candidates) == 0 {
			// as a fallback, try case insensitive approach
			b.ignorecase = true
			c.get_import_candidates(cc.partial, b)
		}
	} else if cc.decl == nil {
		// In case no declaration is the subject of completion, propose all:
		set := c.make_decl_set(c.current.scope)
		c.get_candidates_from_set(set, cc.partial, class, b)
		if cc.partial != "" && len(b.candidates) == 0 {
			// as a fallback, try case insensitive approach
			b.ignorecase = true
			c.get_candidates_from_set(set, cc.partial, class, b)
		}
	} else {
		c.get_candidates_from_decl(cc, class, b)
		if cc.partial != "" && len(b.candidates) == 0 {
			// as a fallback, try case insensitive approach
			b.ignorecase = true
			c.get_candidates_from_decl(cc, class, b)
		}
	}
	partial = len(cc.partial)

	if len(b.candidates) == 0 {
		return nil, 0
	}

	sort.Sort(b)
	return b.candidates, partial
}
|
||||
|
||||
func update_packages(ps map[string]*package_file_cache) {
|
||||
// initiate package cache update
|
||||
done := make(chan bool)
|
||||
for _, p := range ps {
|
||||
go func(p *package_file_cache) {
|
||||
defer func() {
|
||||
if err := recover(); err != nil {
|
||||
print_backtrace(err)
|
||||
done <- false
|
||||
}
|
||||
}()
|
||||
p.update_cache()
|
||||
done <- true
|
||||
}(p)
|
||||
}
|
||||
|
||||
// wait for its completion
|
||||
for _ = range ps {
|
||||
if !<-done {
|
||||
panic("One of the package cache updaters panicked")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// merge_decls merges every declaration from `decls` into the package
// scope `pkg`, and re-parents the file scope onto it so lookups in the
// file fall through to package-level names.
func merge_decls(filescope *scope, pkg *scope, decls map[string]*decl) {
	for _, d := range decls {
		pkg.merge_decl(d)
	}
	filescope.parent = pkg
}
|
||||
|
||||
// merge_decls_from_packages merges the exported declarations of every
// dot-imported (`import . "..."`) package into the package scope.
func merge_decls_from_packages(pkgscope *scope, pkgs []package_import, pcache package_cache) {
	for _, p := range pkgs {
		path, alias := p.path, p.alias
		if alias != "." {
			// only dot-imports spill their names into the package scope
			continue
		}
		// NOTE: shadows the loop variable p with the cached package decl
		p := pcache[path].main
		if p == nil {
			continue
		}
		for _, d := range p.children {
			if ast.IsExported(d.name) {
				pkgscope.merge_decl(d)
			}
		}
	}
}
|
||||
|
||||
// fixup_packages binds each imported package's alias in the file scope
// to the cached package declaration. Dot-imports are skipped because
// their contents are merged directly into the package scope instead
// (see merge_decls_from_packages).
// NOTE(review): pcache[path] is dereferenced without a presence check;
// presumably update_packages guarantees an entry exists — verify.
func fixup_packages(filescope *scope, pkgs []package_import, pcache package_cache) {
	for _, p := range pkgs {
		path, alias := p.path, p.alias
		if alias == "" {
			// no explicit alias: fall back to the package's default one
			alias = pcache[path].defalias
		}
		// skip packages that will be merged to the package scope
		if alias == "." {
			continue
		}
		filescope.replace_decl(alias, pcache[path].main)
	}
}
|
||||
|
||||
// get_other_package_files fetches (and updates) the declaration cache
// of every other file in the current package, one goroutine per file.
// Panics if any of the updaters panicked.
func get_other_package_files(filename, packageName string, declcache *decl_cache) []*decl_file_cache {
	others := find_other_package_files(filename, packageName)

	ret := make([]*decl_file_cache, len(others))
	done := make(chan *decl_file_cache)

	for _, nm := range others {
		go func(name string) {
			defer func() {
				// report a panic as a nil result so the collector below
				// can detect it
				if err := recover(); err != nil {
					print_backtrace(err)
					done <- nil
				}
			}()
			done <- declcache.get_and_update(name)
		}(nm)
	}

	// NOTE: results arrive in completion order, not in the order of
	// `others`; ret is therefore filled in arbitrary order.
	for i := range others {
		ret[i] = <-done
		if ret[i] == nil {
			panic("One of the decl cache updaters panicked")
		}
	}

	return ret
}
|
||||
|
||||
func find_other_package_files(filename, package_name string) []string {
|
||||
if filename == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
dir, file := filepath.Split(filename)
|
||||
files_in_dir, err := readdir_lstat(dir)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
count := 0
|
||||
for _, stat := range files_in_dir {
|
||||
ok, _ := filepath.Match("*.go", stat.Name())
|
||||
if !ok || stat.Name() == file {
|
||||
continue
|
||||
}
|
||||
count++
|
||||
}
|
||||
|
||||
out := make([]string, 0, count)
|
||||
for _, stat := range files_in_dir {
|
||||
const non_regular = os.ModeDir | os.ModeSymlink |
|
||||
os.ModeDevice | os.ModeNamedPipe | os.ModeSocket
|
||||
|
||||
ok, _ := filepath.Match("*.go", stat.Name())
|
||||
if !ok || stat.Name() == file || stat.Mode()&non_regular != 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
abspath := filepath.Join(dir, stat.Name())
|
||||
if file_package_name(abspath) == package_name {
|
||||
n := len(out)
|
||||
out = out[:n+1]
|
||||
out[n] = abspath
|
||||
}
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// file_package_name parses just the package clause of the given file
// and returns the declared package name. Returns "" when the file
// cannot be read or its package clause cannot be parsed (previously a
// failed parse caused a nil-pointer panic because the error was
// ignored and file.Name was dereferenced unconditionally).
func file_package_name(filename string) string {
	file, _ := parser.ParseFile(token.NewFileSet(), filename, nil, parser.PackageClauseOnly)
	if file == nil || file.Name == nil {
		return ""
	}
	return file.Name.Name
}
|
||||
|
||||
func make_decl_set_recursive(set map[string]*decl, scope *scope) {
|
||||
for name, ent := range scope.entities {
|
||||
if _, ok := set[name]; !ok {
|
||||
set[name] = ent
|
||||
}
|
||||
}
|
||||
if scope.parent != nil {
|
||||
make_decl_set_recursive(set, scope.parent)
|
||||
}
|
||||
}
|
||||
|
||||
func check_func_field_list(f *ast.FieldList) bool {
|
||||
if f == nil {
|
||||
return true
|
||||
}
|
||||
|
||||
for _, field := range f.List {
|
||||
if !check_type_expr(field.Type) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// checks for a type expression correctness, it the type expression has
|
||||
// ast.BadExpr somewhere, returns false, otherwise true
|
||||
func check_type_expr(e ast.Expr) bool {
|
||||
switch t := e.(type) {
|
||||
case *ast.StarExpr:
|
||||
return check_type_expr(t.X)
|
||||
case *ast.ArrayType:
|
||||
return check_type_expr(t.Elt)
|
||||
case *ast.SelectorExpr:
|
||||
return check_type_expr(t.X)
|
||||
case *ast.FuncType:
|
||||
a := check_func_field_list(t.Params)
|
||||
b := check_func_field_list(t.Results)
|
||||
return a && b
|
||||
case *ast.MapType:
|
||||
a := check_type_expr(t.Key)
|
||||
b := check_type_expr(t.Value)
|
||||
return a && b
|
||||
case *ast.Ellipsis:
|
||||
return check_type_expr(t.Elt)
|
||||
case *ast.ChanType:
|
||||
return check_type_expr(t.Value)
|
||||
case *ast.BadExpr:
|
||||
return false
|
||||
default:
|
||||
return true
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// Status output
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
type decl_slice []*decl
|
||||
|
||||
func (s decl_slice) Less(i, j int) bool {
|
||||
if s[i].class != s[j].class {
|
||||
return s[i].name < s[j].name
|
||||
}
|
||||
return s[i].class < s[j].class
|
||||
}
|
||||
func (s decl_slice) Len() int { return len(s) }
|
||||
func (s decl_slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||
|
||||
// ANSI terminal escape sequences used to colorize the status output.
const (
	color_red          = "\033[0;31m"
	color_red_bold     = "\033[1;31m"
	color_green        = "\033[0;32m"
	color_green_bold   = "\033[1;32m"
	color_yellow       = "\033[0;33m"
	color_yellow_bold  = "\033[1;33m"
	color_blue         = "\033[0;34m"
	color_blue_bold    = "\033[1;34m"
	color_magenta      = "\033[0;35m"
	color_magenta_bold = "\033[1;35m"
	color_cyan         = "\033[0;36m"
	color_cyan_bold    = "\033[1;36m"
	color_white        = "\033[0;37m"
	color_white_bold   = "\033[1;37m"
	color_none         = "\033[0m" // reset
)
|
||||
|
||||
// g_decl_class_to_color maps a declaration class to the ANSI color used
// for it in the status output.
var g_decl_class_to_color = [...]string{
	decl_const:        color_white_bold,
	decl_var:          color_magenta,
	decl_type:         color_cyan,
	decl_func:         color_green,
	decl_package:      color_red,
	decl_methods_stub: color_red,
}
|
||||
|
||||
// g_decl_class_to_string_status maps a declaration class to its
// right-aligned label in the status output.
var g_decl_class_to_string_status = [...]string{
	decl_const:        "  const",
	decl_var:          "    var",
	decl_type:         "   type",
	decl_func:         "   func",
	decl_package:      "package",
	decl_methods_stub: "   stub",
}
|
||||
|
||||
func (c *auto_complete_context) status() string {
|
||||
|
||||
buf := bytes.NewBuffer(make([]byte, 0, 4096))
|
||||
fmt.Fprintf(buf, "Server's GOMAXPROCS == %d\n", runtime.GOMAXPROCS(0))
|
||||
fmt.Fprintf(buf, "\nPackage cache contains %d entries\n", len(c.pcache))
|
||||
fmt.Fprintf(buf, "\nListing these entries:\n")
|
||||
for _, mod := range c.pcache {
|
||||
fmt.Fprintf(buf, "\tname: %s (default alias: %s)\n", mod.name, mod.defalias)
|
||||
fmt.Fprintf(buf, "\timports %d declarations and %d packages\n", len(mod.main.children), len(mod.others))
|
||||
if mod.mtime == -1 {
|
||||
fmt.Fprintf(buf, "\tthis package stays in cache forever (built-in package)\n")
|
||||
} else {
|
||||
mtime := time.Unix(0, mod.mtime)
|
||||
fmt.Fprintf(buf, "\tlast modification time: %s\n", mtime)
|
||||
}
|
||||
fmt.Fprintf(buf, "\n")
|
||||
}
|
||||
if c.current.name != "" {
|
||||
fmt.Fprintf(buf, "Last edited file: %s (package: %s)\n", c.current.name, c.current.package_name)
|
||||
if len(c.others) > 0 {
|
||||
fmt.Fprintf(buf, "\nOther files from the current package:\n")
|
||||
}
|
||||
for _, f := range c.others {
|
||||
fmt.Fprintf(buf, "\t%s\n", f.name)
|
||||
}
|
||||
fmt.Fprintf(buf, "\nListing declarations from files:\n")
|
||||
|
||||
const status_decls = "\t%s%s" + color_none + " " + color_yellow + "%s" + color_none + "\n"
|
||||
const status_decls_children = "\t%s%s" + color_none + " " + color_yellow + "%s" + color_none + " (%d)\n"
|
||||
|
||||
fmt.Fprintf(buf, "\n%s:\n", c.current.name)
|
||||
ds := make(decl_slice, len(c.current.decls))
|
||||
i := 0
|
||||
for _, d := range c.current.decls {
|
||||
ds[i] = d
|
||||
i++
|
||||
}
|
||||
sort.Sort(ds)
|
||||
for _, d := range ds {
|
||||
if len(d.children) > 0 {
|
||||
fmt.Fprintf(buf, status_decls_children,
|
||||
g_decl_class_to_color[d.class],
|
||||
g_decl_class_to_string_status[d.class],
|
||||
d.name, len(d.children))
|
||||
} else {
|
||||
fmt.Fprintf(buf, status_decls,
|
||||
g_decl_class_to_color[d.class],
|
||||
g_decl_class_to_string_status[d.class],
|
||||
d.name)
|
||||
}
|
||||
}
|
||||
|
||||
for _, f := range c.others {
|
||||
fmt.Fprintf(buf, "\n%s:\n", f.name)
|
||||
ds = make(decl_slice, len(f.decls))
|
||||
i = 0
|
||||
for _, d := range f.decls {
|
||||
ds[i] = d
|
||||
i++
|
||||
}
|
||||
sort.Sort(ds)
|
||||
for _, d := range ds {
|
||||
if len(d.children) > 0 {
|
||||
fmt.Fprintf(buf, status_decls_children,
|
||||
g_decl_class_to_color[d.class],
|
||||
g_decl_class_to_string_status[d.class],
|
||||
d.name, len(d.children))
|
||||
} else {
|
||||
fmt.Fprintf(buf, status_decls,
|
||||
g_decl_class_to_color[d.class],
|
||||
g_decl_class_to_string_status[d.class],
|
||||
d.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return buf.String()
|
||||
}
|
|
@ -0,0 +1,418 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/scanner"
|
||||
"go/token"
|
||||
"log"
|
||||
)
|
||||
|
||||
func parse_decl_list(fset *token.FileSet, data []byte) ([]ast.Decl, error) {
|
||||
var buf bytes.Buffer
|
||||
buf.WriteString("package p;")
|
||||
buf.Write(data)
|
||||
file, err := parser.ParseFile(fset, "", buf.Bytes(), parser.AllErrors)
|
||||
if err != nil {
|
||||
return file.Decls, err
|
||||
}
|
||||
return file.Decls, nil
|
||||
}
|
||||
|
||||
// log_parse_error logs err, expanding a scanner.ErrorList into one log
// line per contained error, each section prefixed by intro.
func log_parse_error(intro string, err error) {
	el, ok := err.(scanner.ErrorList)
	if !ok {
		log.Printf("%s: %s", intro, err)
		return
	}
	log.Printf("%s:", intro)
	for _, e := range el {
		log.Printf(" %s", e)
	}
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// auto_complete_file
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// auto_complete_file holds the parsed state of a single Go source file
// participating in autocompletion (the edited buffer or one of its
// same-package siblings).
type auto_complete_file struct {
	name         string // file name
	package_name string // package clause of the file

	decls     map[string]*decl // top-level declarations by name
	packages  []package_import // imports declared in this file
	filescope *scope           // file-level scope
	scope     *scope           // innermost scope built while walking toward the cursor

	cursor  int // byte offset of the cursor; for current file buffer only (-1 otherwise)
	fset    *token.FileSet
	context *package_lookup_context
}
|
||||
|
||||
func new_auto_complete_file(name string, context *package_lookup_context) *auto_complete_file {
|
||||
p := new(auto_complete_file)
|
||||
p.name = name
|
||||
p.cursor = -1
|
||||
p.fset = token.NewFileSet()
|
||||
p.context = context
|
||||
return p
|
||||
}
|
||||
|
||||
// offset converts a position from f.fset into a byte offset within the
// original, unwrapped source. fixlen compensates for the "package p;"
// prefix that parse_decl_list prepends before parsing.
func (f *auto_complete_file) offset(p token.Pos) int {
	const fixlen = len("package p;")
	return f.fset.Position(p).Offset - fixlen
}
|
||||
|
||||
// this one is used for current file buffer exclusively
|
||||
// process_data parses the edited buffer and builds the declaration tables
// and scopes used for completion. This one is used for the current file
// buffer exclusively: the declaration under the cursor is ripped out
// (rip_off_decl) and parsed separately, so a half-typed body does not
// break parsing of the rest of the file.
func (f *auto_complete_file) process_data(data []byte) {
	// cur is the cursor offset relative to the extracted block (or the
	// original cursor if nothing was extracted — TODO confirm against
	// rip_off_decl, which is defined elsewhere).
	cur, filedata, block := rip_off_decl(data, f.cursor)
	file, err := parser.ParseFile(f.fset, "", filedata, parser.AllErrors)
	if err != nil && *g_debug {
		log_parse_error("Error parsing input file (outer block)", err)
	}
	f.package_name = package_name(file)

	f.decls = make(map[string]*decl)
	f.packages = collect_package_imports(f.name, file.Decls, f.context)
	f.filescope = new_scope(nil)
	f.scope = f.filescope

	for _, d := range file.Decls {
		anonymify_ast(d, 0, f.filescope)
	}

	// process all top-level declarations
	for _, decl := range file.Decls {
		append_to_top_decls(f.decls, decl, f.scope)
	}
	if block != nil {
		// process the ripped-out local function as a top-level declaration
		decls, err := parse_decl_list(f.fset, block)
		if err != nil && *g_debug {
			log_parse_error("Error parsing input file (inner block)", err)
		}

		for _, d := range decls {
			anonymify_ast(d, 0, f.filescope)
		}

		for _, decl := range decls {
			append_to_top_decls(f.decls, decl, f.scope)
		}

		// process function internals with the block-relative cursor
		f.cursor = cur
		for _, decl := range decls {
			f.process_decl_locals(decl)
		}
	}
}
|
||||
|
||||
// process_decl_locals builds scopes for a function declaration whose body
// contains the cursor: receiver, params and results go into a fresh scope,
// then the body is processed. Any other declaration is walked looking for
// function literals that may contain the cursor.
func (f *auto_complete_file) process_decl_locals(decl ast.Decl) {
	switch t := decl.(type) {
	case *ast.FuncDecl:
		if f.cursor_in(t.Body) {
			// s is the scope the parameter types are resolved against
			s := f.scope
			f.scope = new_scope(f.scope)

			f.process_field_list(t.Recv, s)
			f.process_field_list(t.Type.Params, s)
			f.process_field_list(t.Type.Results, s)
			f.process_block_stmt(t.Body)
		}
	default:
		v := new(func_lit_visitor)
		v.ctx = f
		ast.Walk(v, decl)
	}
}
|
||||
|
||||
// process_decl registers the names introduced by a local declaration in the
// current scope. Declarations located after the cursor are ignored.
func (f *auto_complete_file) process_decl(decl ast.Decl) {
	if t, ok := decl.(*ast.GenDecl); ok && f.offset(t.TokPos) > f.cursor {
		return
	}
	prevscope := f.scope
	foreach_decl(decl, func(data *foreach_decl_struct) {
		class := ast_decl_class(data.decl)
		if class != decl_type {
			// non-type declarations open a fresh scope; their
			// initializers resolve against the previous one
			f.scope, prevscope = advance_scope(f.scope)
		}
		for i, name := range data.names {
			typ, v, vi := data.type_value_index(i)

			d := new_decl_full(name.Name, class, 0, typ, v, vi, prevscope)
			if d == nil {
				return
			}

			f.scope.add_named_decl(d)
		}
	})
}
|
||||
|
||||
// process_block_stmt processes the statements of a block when the cursor
// lies inside it, then re-walks the block for function literals.
func (f *auto_complete_file) process_block_stmt(block *ast.BlockStmt) {
	if block != nil && f.cursor_in(block) {
		f.scope, _ = advance_scope(f.scope)

		for _, stmt := range block.List {
			f.process_stmt(stmt)
		}

		// hack to process all func literals
		v := new(func_lit_visitor)
		v.ctx = f
		ast.Walk(v, block)
	}
}
|
||||
|
||||
// func_lit_visitor is an ast.Visitor that descends into the function
// literal containing the cursor and builds its scopes.
type func_lit_visitor struct {
	ctx *auto_complete_file
}

// Visit handles a *ast.FuncLit whose body contains the cursor: its params
// and results are added to a fresh scope and the body is processed, after
// which the walk stops (nil). Any other node continues the walk.
func (v *func_lit_visitor) Visit(node ast.Node) ast.Visitor {
	if t, ok := node.(*ast.FuncLit); ok && v.ctx.cursor_in(t.Body) {
		s := v.ctx.scope
		v.ctx.scope = new_scope(v.ctx.scope)

		v.ctx.process_field_list(t.Type.Params, s)
		v.ctx.process_field_list(t.Type.Results, s)
		v.ctx.process_block_stmt(t.Body)

		return nil
	}
	return v
}
|
||||
|
||||
// process_stmt dispatches statement processing while walking toward the
// cursor, adding declared names to the current scope along the way. For
// if/for statements, a cursor inside the header exposes only the init
// statement; a cursor inside the body opens a new scope.
func (f *auto_complete_file) process_stmt(stmt ast.Stmt) {
	switch t := stmt.(type) {
	case *ast.DeclStmt:
		f.process_decl(t.Decl)
	case *ast.AssignStmt:
		f.process_assign_stmt(t)
	case *ast.IfStmt:
		if f.cursor_in_if_head(t) {
			f.process_stmt(t.Init)
		} else if f.cursor_in_if_stmt(t) {
			f.scope, _ = advance_scope(f.scope)
			f.process_stmt(t.Init)
			f.process_block_stmt(t.Body)
			f.process_stmt(t.Else)
		}
	case *ast.BlockStmt:
		f.process_block_stmt(t)
	case *ast.RangeStmt:
		f.process_range_stmt(t)
	case *ast.ForStmt:
		if f.cursor_in_for_head(t) {
			f.process_stmt(t.Init)
		} else if f.cursor_in(t.Body) {
			f.scope, _ = advance_scope(f.scope)

			f.process_stmt(t.Init)
			f.process_block_stmt(t.Body)
		}
	case *ast.SwitchStmt:
		f.process_switch_stmt(t)
	case *ast.TypeSwitchStmt:
		f.process_type_switch_stmt(t)
	case *ast.SelectStmt:
		f.process_select_stmt(t)
	case *ast.LabeledStmt:
		f.process_stmt(t.Stmt)
	}
	// other statement kinds introduce no names and are ignored
}
|
||||
|
||||
// process_select_stmt processes a select statement containing the cursor:
// it finds the last comm clause starting before the cursor, registers a
// variable declared by a `v := <-ch` comm (if any), and processes that
// clause's body.
func (f *auto_complete_file) process_select_stmt(a *ast.SelectStmt) {
	if !f.cursor_in(a.Body) {
		return
	}
	var prevscope *scope
	f.scope, prevscope = advance_scope(f.scope)

	// last clause whose colon precedes the cursor — the clause the cursor
	// is inside of (select bodies contain only *ast.CommClause nodes)
	var last_cursor_after *ast.CommClause
	for _, s := range a.Body.List {
		if cc := s.(*ast.CommClause); f.cursor > f.offset(cc.Colon) {
			last_cursor_after = cc
		}
	}

	if last_cursor_after != nil {
		if last_cursor_after.Comm != nil {
			//if lastCursorAfter.Lhs != nil && lastCursorAfter.Tok == token.DEFINE {
			if astmt, ok := last_cursor_after.Comm.(*ast.AssignStmt); ok && astmt.Tok == token.DEFINE {
				// register the channel-receive variable; vi == -1
				// means "not a multi-value index" — TODO confirm
				// against new_decl_var
				vname := astmt.Lhs[0].(*ast.Ident).Name
				v := new_decl_var(vname, nil, astmt.Rhs[0], -1, prevscope)
				f.scope.add_named_decl(v)
			}
		}
		for _, s := range last_cursor_after.Body {
			f.process_stmt(s)
		}
	}
}
|
||||
|
||||
// process_type_switch_stmt processes a type switch containing the cursor.
// If the switch declares a variable (`switch v := x.(type)`), that variable
// is registered with the type of the single-type case clause the cursor is
// in (if any), mirroring the compiler's per-clause typing.
func (f *auto_complete_file) process_type_switch_stmt(a *ast.TypeSwitchStmt) {
	if !f.cursor_in(a.Body) {
		return
	}
	var prevscope *scope
	f.scope, prevscope = advance_scope(f.scope)

	f.process_stmt(a.Init)
	// type var declared in the switch guard, if any
	var tv *decl
	if a, ok := a.Assign.(*ast.AssignStmt); ok {
		lhs := a.Lhs
		rhs := a.Rhs
		if lhs != nil && len(lhs) == 1 {
			tvname := lhs[0].(*ast.Ident).Name
			tv = new_decl_var(tvname, nil, rhs[0], -1, prevscope)
		}
	}

	// last case clause starting before the cursor
	var last_cursor_after *ast.CaseClause
	for _, s := range a.Body.List {
		if cc := s.(*ast.CaseClause); f.cursor > f.offset(cc.Colon) {
			last_cursor_after = cc
		}
	}

	if last_cursor_after != nil {
		if tv != nil {
			if last_cursor_after.List != nil && len(last_cursor_after.List) == 1 {
				// single-type case: the guard variable takes that type
				tv.typ = last_cursor_after.List[0]
				tv.value = nil
			}
			f.scope.add_named_decl(tv)
		}
		for _, s := range last_cursor_after.Body {
			f.process_stmt(s)
		}
	}
}
|
||||
|
||||
// process_switch_stmt processes an expression switch containing the cursor:
// the init statement is processed, then the body of the case clause the
// cursor is inside of.
func (f *auto_complete_file) process_switch_stmt(a *ast.SwitchStmt) {
	if !f.cursor_in(a.Body) {
		return
	}
	f.scope, _ = advance_scope(f.scope)

	f.process_stmt(a.Init)
	// last case clause starting before the cursor
	var last_cursor_after *ast.CaseClause
	for _, s := range a.Body.List {
		if cc := s.(*ast.CaseClause); f.cursor > f.offset(cc.Colon) {
			last_cursor_after = cc
		}
	}
	if last_cursor_after != nil {
		for _, s := range last_cursor_after.Body {
			f.process_stmt(s)
		}
	}
}
|
||||
|
||||
// process_range_stmt processes a range statement containing the cursor.
// For `k, v := range x` the key and value identifiers are registered as
// range variables (value index 0 and 1 of the ranged expression).
func (f *auto_complete_file) process_range_stmt(a *ast.RangeStmt) {
	if !f.cursor_in(a.Body) {
		return
	}
	var prevscope *scope
	f.scope, prevscope = advance_scope(f.scope)

	if a.Tok == token.DEFINE {
		if t, ok := a.Key.(*ast.Ident); ok {
			d := new_decl_var(t.Name, nil, a.X, 0, prevscope)
			if d != nil {
				d.flags |= decl_rangevar
				f.scope.add_named_decl(d)
			}
		}

		if a.Value != nil {
			if t, ok := a.Value.(*ast.Ident); ok {
				d := new_decl_var(t.Name, nil, a.X, 1, prevscope)
				if d != nil {
					d.flags |= decl_rangevar
					f.scope.add_named_decl(d)
				}
			}
		}
	}

	f.process_block_stmt(a.Body)
}
|
||||
|
||||
// process_assign_stmt registers the variables of a short variable
// declaration (`:=`) located before the cursor. Plain assignments (`=`)
// and statements past the cursor are ignored.
func (f *auto_complete_file) process_assign_stmt(a *ast.AssignStmt) {
	if a.Tok != token.DEFINE || f.offset(a.TokPos) > f.cursor {
		return
	}

	names := make([]*ast.Ident, len(a.Lhs))
	for i, name := range a.Lhs {
		id, ok := name.(*ast.Ident)
		if !ok {
			// something is wrong, just ignore the whole stmt
			return
		}
		names[i] = id
	}

	var prevscope *scope
	f.scope, prevscope = advance_scope(f.scope)

	pack := decl_pack{names, nil, a.Rhs}
	for i, name := range pack.names {
		typ, v, vi := pack.type_value_index(i)
		d := new_decl_var(name.Name, typ, v, vi, prevscope)
		if d == nil {
			continue
		}

		f.scope.add_named_decl(d)
	}
}
|
||||
|
||||
func (f *auto_complete_file) process_field_list(field_list *ast.FieldList, s *scope) {
|
||||
if field_list != nil {
|
||||
decls := ast_field_list_to_decls(field_list, decl_var, 0, s, false)
|
||||
for _, d := range decls {
|
||||
f.scope.add_named_decl(d)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (f *auto_complete_file) cursor_in_if_head(s *ast.IfStmt) bool {
|
||||
if f.cursor > f.offset(s.If) && f.cursor <= f.offset(s.Body.Lbrace) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// cursor_in_if_stmt reports whether the cursor lies anywhere inside the if
// statement (header or body), tolerating a missing end position produced
// by the "package p;" unwrapping.
func (f *auto_complete_file) cursor_in_if_stmt(s *ast.IfStmt) bool {
	if f.cursor > f.offset(s.If) {
		// magic -10 comes from auto_complete_file.offset method, see
		// len() expr in there: token.NoPos maps to offset -10 after the
		// "package p;" correction
		if f.offset(s.End()) == -10 || f.cursor < f.offset(s.End()) {
			return true
		}
	}
	return false
}
|
||||
|
||||
func (f *auto_complete_file) cursor_in_for_head(s *ast.ForStmt) bool {
|
||||
if f.cursor > f.offset(s.For) && f.cursor <= f.offset(s.Body.Lbrace) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (f *auto_complete_file) cursor_in(block *ast.BlockStmt) bool {
|
||||
if f.cursor == -1 || block == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if f.cursor > f.offset(block.Lbrace) && f.cursor <= f.offset(block.Rbrace) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
|
@ -0,0 +1,182 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/build"
|
||||
"io/ioutil"
|
||||
"net/rpc"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
// do_client connects to the gocode daemon (starting one on demand), runs
// the command named by the first CLI argument, and returns the process
// exit code (0 on success, 1 on failure/unknown command).
func do_client() int {
	addr := *g_addr
	if *g_sock == "unix" {
		addr = get_socket_filename()
	}

	// client
	client, err := rpc.Dial(*g_sock, addr)
	if err != nil {
		// a stale unix socket file prevents the new server from
		// binding; remove it before spawning
		if *g_sock == "unix" && file_exists(addr) {
			os.Remove(addr)
		}

		err = try_run_server()
		if err != nil {
			fmt.Printf("%s\n", err.Error())
			return 1
		}
		client, err = try_to_connect(*g_sock, addr)
		if err != nil {
			fmt.Printf("%s\n", err.Error())
			return 1
		}
	}
	defer client.Close()

	if flag.NArg() > 0 {
		switch flag.Arg(0) {
		case "autocomplete":
			cmd_auto_complete(client)
		case "close":
			cmd_close(client)
		case "status":
			cmd_status(client)
		case "drop-cache":
			cmd_drop_cache(client)
		case "set":
			cmd_set(client)
		default:
			fmt.Printf("unknown argument: %q, try running \"gocode -h\"\n", flag.Arg(0))
			return 1
		}
	}
	return 0
}
|
||||
|
||||
func try_run_server() error {
|
||||
path := get_executable_filename()
|
||||
args := []string{os.Args[0], "-s", "-sock", *g_sock, "-addr", *g_addr}
|
||||
cwd, _ := os.Getwd()
|
||||
|
||||
var err error
|
||||
stdin, err := os.Open(os.DevNull)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
stdout, err := os.OpenFile(os.DevNull, os.O_WRONLY, 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
stderr, err := os.OpenFile(os.DevNull, os.O_WRONLY, 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
procattr := os.ProcAttr{Dir: cwd, Env: os.Environ(), Files: []*os.File{stdin, stdout, stderr}}
|
||||
p, err := os.StartProcess(path, args, &procattr)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return p.Release()
|
||||
}
|
||||
|
||||
// try_to_connect dials the freshly started server, retrying every 10ms
// for up to one second; it returns the client on success or the last dial
// error once the budget is exhausted.
func try_to_connect(network, address string) (client *rpc.Client, err error) {
	for elapsed := 0; ; elapsed += 10 {
		client, err = rpc.Dial(network, address)
		if err == nil || elapsed >= 1000 {
			return
		}
		time.Sleep(10 * time.Millisecond)
	}
}
|
||||
|
||||
// prepare_file_filename_cursor reads the source (from -in or stdin),
// strips a shebang line, and derives the absolute filename and cursor
// byte offset from the CLI arguments. An offset prefixed with 'c'/'C' is
// interpreted as a character (rune) offset and converted to bytes.
// Panics if the source cannot be read.
func prepare_file_filename_cursor() ([]byte, string, int) {
	var file []byte
	var err error

	if *g_input != "" {
		file, err = ioutil.ReadFile(*g_input)
	} else {
		file, err = ioutil.ReadAll(os.Stdin)
	}

	if err != nil {
		panic(err.Error())
	}

	// skipped is the byte length of a removed shebang line, if any
	var skipped int
	file, skipped = filter_out_shebang(file)

	filename := *g_input
	cursor := -1

	offset := ""
	switch flag.NArg() {
	case 2:
		offset = flag.Arg(1)
	case 3:
		filename = flag.Arg(1) // Override default filename
		offset = flag.Arg(2)
	}

	if offset != "" {
		if offset[0] == 'c' || offset[0] == 'C' {
			// offset is in characters, convert to bytes
			cursor, _ = strconv.Atoi(offset[1:])
			cursor = char_to_byte_offset(file, cursor)
		} else {
			cursor, _ = strconv.Atoi(offset)
		}
	}

	// the cursor was given against the original buffer; compensate for
	// the stripped shebang
	cursor -= skipped
	if filename != "" && !filepath.IsAbs(filename) {
		cwd, _ := os.Getwd()
		filename = filepath.Join(cwd, filename)
	}
	return file, filename, cursor
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// commands
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
func cmd_status(c *rpc.Client) {
|
||||
fmt.Printf("%s\n", client_status(c, 0))
|
||||
}
|
||||
|
||||
// cmd_auto_complete runs one completion request: it packs the current
// build context, reads source/filename/cursor from the CLI, and writes the
// candidates using the formatter selected by -f.
func cmd_auto_complete(c *rpc.Client) {
	context := pack_build_context(&build.Default)
	file, filename, cursor := prepare_file_filename_cursor()
	f := get_formatter(*g_format)
	f.write_candidates(client_auto_complete(c, file, filename, cursor, context))
}
|
||||
|
||||
// cmd_close asks the daemon to shut down.
func cmd_close(c *rpc.Client) {
	client_close(c, 0)
}
|
||||
|
||||
// cmd_drop_cache asks the daemon to discard its package cache.
func cmd_drop_cache(c *rpc.Client) {
	client_drop_cache(c, 0)
}
|
||||
|
||||
func cmd_set(c *rpc.Client) {
|
||||
switch flag.NArg() {
|
||||
case 1:
|
||||
fmt.Print(client_set(c, "\x00", "\x00"))
|
||||
case 2:
|
||||
fmt.Print(client_set(c, flag.Arg(1), "\x00"))
|
||||
case 3:
|
||||
fmt.Print(client_set(c, flag.Arg(1), flag.Arg(2)))
|
||||
}
|
||||
}
|
|
@ -0,0 +1,177 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"reflect"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// config
|
||||
//
|
||||
// Structure represents persistent config storage of the gocode daemon. Usually
|
||||
// the config is located somewhere in ~/.config/gocode directory.
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// config is the persistent configuration of the gocode daemon. Usually
// the config is located somewhere in the ~/.config/gocode directory; it is
// serialized as JSON using the field tags below, and the same tags act as
// option names for the "set" command.
type config struct {
	ProposeBuiltins    bool   `json:"propose-builtins"`
	LibPath            string `json:"lib-path"`
	CustomPkgPrefix    string `json:"custom-pkg-prefix"`
	CustomVendorDir    string `json:"custom-vendor-dir"`
	Autobuild          bool   `json:"autobuild"`
	ForceDebugOutput   string `json:"force-debug-output"`
	PackageLookupMode  string `json:"package-lookup-mode"`
	CloseTimeout       int    `json:"close-timeout"`
	UnimportedPackages bool   `json:"unimported-packages"`
}
|
||||
|
||||
// g_config is the global daemon configuration with its default values;
// config.read() overwrites it at startup when a config file exists.
var g_config = config{
	ProposeBuiltins:    false,
	LibPath:            "",
	CustomPkgPrefix:    "",
	Autobuild:          false,
	ForceDebugOutput:   "",
	PackageLookupMode:  "go",
	CloseTimeout:       1800,
	UnimportedPackages: false,
}
|
||||
|
||||
// g_string_to_bool maps the accepted textual spellings of boolean config
// values (as typed by the user in "gocode set") to their bool value.
var g_string_to_bool = map[string]bool{
	"t":     true,
	"true":  true,
	"y":     true,
	"yes":   true,
	"on":    true,
	"1":     true,
	"f":     false,
	"false": false,
	"n":     false,
	"no":    false,
	"off":   false,
	"0":     false,
}
|
||||
|
||||
func set_value(v reflect.Value, value string) {
|
||||
switch t := v; t.Kind() {
|
||||
case reflect.Bool:
|
||||
v, ok := g_string_to_bool[value]
|
||||
if ok {
|
||||
t.SetBool(v)
|
||||
}
|
||||
case reflect.String:
|
||||
t.SetString(value)
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
v, err := strconv.ParseInt(value, 10, 64)
|
||||
if err == nil {
|
||||
t.SetInt(v)
|
||||
}
|
||||
case reflect.Float32, reflect.Float64:
|
||||
v, err := strconv.ParseFloat(value, 64)
|
||||
if err == nil {
|
||||
t.SetFloat(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func list_value(v reflect.Value, name string, w io.Writer) {
|
||||
switch t := v; t.Kind() {
|
||||
case reflect.Bool:
|
||||
fmt.Fprintf(w, "%s %v\n", name, t.Bool())
|
||||
case reflect.String:
|
||||
fmt.Fprintf(w, "%s \"%v\"\n", name, t.String())
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
fmt.Fprintf(w, "%s %v\n", name, t.Int())
|
||||
case reflect.Float32, reflect.Float64:
|
||||
fmt.Fprintf(w, "%s %v\n", name, t.Float())
|
||||
}
|
||||
}
|
||||
|
||||
func (this *config) list() string {
|
||||
str, typ := this.value_and_type()
|
||||
buf := bytes.NewBuffer(make([]byte, 0, 256))
|
||||
for i := 0; i < str.NumField(); i++ {
|
||||
v := str.Field(i)
|
||||
name := typ.Field(i).Tag.Get("json")
|
||||
list_value(v, name, buf)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func (this *config) list_option(name string) string {
|
||||
str, typ := this.value_and_type()
|
||||
buf := bytes.NewBuffer(make([]byte, 0, 256))
|
||||
for i := 0; i < str.NumField(); i++ {
|
||||
v := str.Field(i)
|
||||
nm := typ.Field(i).Tag.Get("json")
|
||||
if nm == name {
|
||||
list_value(v, name, buf)
|
||||
}
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// set_option sets the config option whose json tag equals name to value,
// persists the whole config via write(), and returns the option's new
// listing ("" when the name matches nothing).
func (this *config) set_option(name, value string) string {
	str, typ := this.value_and_type()
	buf := bytes.NewBuffer(make([]byte, 0, 256))
	for i := 0; i < str.NumField(); i++ {
		v := str.Field(i)
		nm := typ.Field(i).Tag.Get("json")
		if nm == name {
			set_value(v, value)
			list_value(v, name, buf)
		}
	}
	// NOTE(review): the write() error is discarded, so a failed save is
	// invisible to the caller — confirm whether that is intentional.
	this.write()
	return buf.String()

}
|
||||
|
||||
// value_and_type returns the addressable reflect.Value of the config
// struct (so fields can be set) together with its type, for tag-driven
// iteration over the options.
func (this *config) value_and_type() (reflect.Value, reflect.Type) {
	v := reflect.ValueOf(this).Elem()
	return v, v.Type()
}
|
||||
|
||||
func (this *config) write() error {
|
||||
data, err := json.Marshal(this)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// make sure config dir exists
|
||||
dir := config_dir()
|
||||
if !file_exists(dir) {
|
||||
os.MkdirAll(dir, 0755)
|
||||
}
|
||||
|
||||
f, err := os.Create(config_file())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
_, err = f.Write(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (this *config) read() error {
|
||||
data, err := ioutil.ReadFile(config_file())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(data, this)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,557 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/scanner"
|
||||
"go/token"
|
||||
"log"
|
||||
)
|
||||
|
||||
// cursor_context describes what sits immediately before the cursor and is
// used to filter completion candidates.
type cursor_context struct {
	decl         *decl  // declaration the cursor expression resolved to (nil if none)
	partial      string // partially typed identifier (or import path) before the cursor
	struct_field bool   // completing field names inside a struct literal
	decl_import  bool   // completing an import path string

	// store expression that was supposed to be deduced to "decl", however
	// if decl is nil, then deduction failed, we could try to resolve it to
	// unimported package instead
	expr ast.Expr
}
|
||||
|
||||
// token_iterator is a backwards cursor over the tokens scanned before the
// completion point; token_index addresses the current token.
type token_iterator struct {
	tokens      []token_item
	token_index int
}
|
||||
|
||||
type token_item struct {
|
||||
off int
|
||||
tok token.Token
|
||||
lit string
|
||||
}
|
||||
|
||||
func (i token_item) literal() string {
|
||||
if i.tok.IsLiteral() {
|
||||
return i.lit
|
||||
} else {
|
||||
return i.tok.String()
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func new_token_iterator(src []byte, cursor int) token_iterator {
|
||||
tokens := make([]token_item, 0, 1000)
|
||||
var s scanner.Scanner
|
||||
fset := token.NewFileSet()
|
||||
file := fset.AddFile("", fset.Base(), len(src))
|
||||
s.Init(file, src, nil, 0)
|
||||
for {
|
||||
pos, tok, lit := s.Scan()
|
||||
off := fset.Position(pos).Offset
|
||||
if tok == token.EOF || cursor <= off {
|
||||
break
|
||||
}
|
||||
tokens = append(tokens, token_item{
|
||||
off: off,
|
||||
tok: tok,
|
||||
lit: lit,
|
||||
})
|
||||
}
|
||||
return token_iterator{
|
||||
tokens: tokens,
|
||||
token_index: len(tokens) - 1,
|
||||
}
|
||||
}
|
||||
|
||||
// token returns the token under the iterator's current position.
func (this *token_iterator) token() token_item {
	return this.tokens[this.token_index]
}
|
||||
|
||||
// go_back moves the iterator one token toward the beginning of the source;
// it reports false (without moving) when already at the first token.
func (this *token_iterator) go_back() bool {
	if this.token_index <= 0 {
		return false
	}
	this.token_index--
	return true
}
|
||||
|
||||
// bracket_pairs_map maps each closing bracket token to its opening
// counterpart, for backwards balanced-pair matching.
var bracket_pairs_map = map[token.Token]token.Token{
	token.RPAREN: token.LPAREN,
	token.RBRACK: token.LBRACK,
	token.RBRACE: token.LBRACE,
}
|
||||
|
||||
// skip_to_left moves the iterator backwards to the unmatched `left` token
// that encloses the current position, counting nested right/left pairs.
// Returns false when the beginning of the token stream is reached first.
func (ti *token_iterator) skip_to_left(left, right token.Token) bool {
	if ti.token().tok == left {
		return true
	}
	// balance counts right-brackets still unmatched while scanning left
	balance := 1
	for balance != 0 {
		if !ti.go_back() {
			return false
		}
		switch ti.token().tok {
		case right:
			balance++
		case left:
			balance--
		}
	}
	return true
}
|
||||
|
||||
// when the cursor is at the ')' or ']' or '}', move the cursor to an opposite
// bracket pair, this functions takes nested bracket pairs into account
func (this *token_iterator) skip_to_balanced_pair() bool {
	right := this.token().tok
	left := bracket_pairs_map[right]
	return this.skip_to_left(left, right)
}
|
||||
|
||||
// Move the cursor to the open brace of the current block, taking nested blocks
// into account.
func (this *token_iterator) skip_to_left_curly() bool {
	return this.skip_to_left(token.LBRACE, token.RBRACE)
}
|
||||
|
||||
// Extract the type expression right before the enclosing curly bracket block.
// Examples (# - the cursor):
//   &lib.Struct{Whatever: 1, Hel#} // returns "lib.Struct"
//   X{#}                           // returns X
// The idea is that we check if this type expression is a type and if it is, we
// can apply special filtering for autocompletion results.
// Sadly, this doesn't cover anonymous structs.
func (ti *token_iterator) extract_struct_type() string {
	// walk to the '{' of the literal, then read "<ident>" or
	// "<ident>.<ident>" backwards; every failed go_back returns the best
	// name recovered so far
	if !ti.skip_to_left_curly() {
		return ""
	}
	if !ti.go_back() {
		return ""
	}
	if ti.token().tok != token.IDENT {
		return ""
	}
	// b is the rightmost identifier (the type name proper)
	b := ti.token().literal()
	if !ti.go_back() {
		return b
	}
	if ti.token().tok != token.PERIOD {
		return b
	}
	if !ti.go_back() {
		return b
	}
	if ti.token().tok != token.IDENT {
		return b
	}
	// qualified form: "<package>.<type>"
	return ti.token().literal() + "." + b
}
|
||||
|
||||
// Starting from the token under the cursor move back and extract something
// that resembles a valid Go primary expression. Examples of primary expressions
// from Go spec:
//   x
//   2
//   (s + ".txt")
//   f(3.1415, true)
//   Point{1, 2}
//   m["foo"]
//   s[i : j + 1]
//   obj.color
//   f.p[i].x()
//
// As you can see we can move through all of them using balanced bracket
// matching and applying simple rules
// E.g.
//   Point{1, 2}.m["foo"].s[i : j + 1].MethodCall(a, func(a, b int) int { return a + b }).
// Can be seen as:
//   Point{    }.m[    ].s[         ].MethodCall(                                      ).
// Which boils the rules down to these connected via dots:
//   ident
//   ident[]
//   ident{}
//   ident()
// Of course there are also slightly more complicated rules for brackets:
//   ident{}.ident()[5][4](), etc.
func (this *token_iterator) extract_go_expr() string {
	orig := this.token_index

	// Contains the type of the previously scanned token (initialized with
	// the token right under the cursor). This is the token to the *right* of
	// the current one.
	prev := this.token().tok
loop:
	for {
		if !this.go_back() {
			// reached the beginning of the stream: the whole prefix
			// is the expression
			return token_items_to_string(this.tokens[:orig])
		}
		switch this.token().tok {
		case token.PERIOD:
			// If the '.' is not followed by IDENT, it's invalid.
			if prev != token.IDENT {
				break loop
			}
		case token.IDENT:
			// Valid tokens after IDENT are '.', '[', '{' and '('.
			switch prev {
			case token.PERIOD, token.LBRACK, token.LBRACE, token.LPAREN:
				// all ok
			default:
				break loop
			}
		case token.RBRACE:
			// This one can only be a part of type initialization, like:
			//   Dummy{}.Hello()
			// It is valid Go if Hello method is defined on a non-pointer receiver.
			if prev != token.PERIOD {
				break loop
			}
			this.skip_to_balanced_pair()
		case token.RPAREN, token.RBRACK:
			// After ']' and ')' their opening counterparts are valid '[', '(',
			// as well as the dot.
			switch prev {
			case token.PERIOD, token.LBRACK, token.LPAREN:
				// all ok
			default:
				break loop
			}
			this.skip_to_balanced_pair()
		default:
			break loop
		}
		prev = this.token().tok
	}
	// the expression spans from just after the stopping token to the
	// original position
	expr := token_items_to_string(this.tokens[this.token_index+1 : orig])
	if *g_debug {
		log.Printf("extracted expression tokens: %s", expr)
	}
	return expr
}
|
||||
|
||||
// Given a slice of token_item, reassembles them into the original literal
|
||||
// expression.
|
||||
func token_items_to_string(tokens []token_item) string {
|
||||
var buf bytes.Buffer
|
||||
for _, t := range tokens {
|
||||
buf.WriteString(t.literal())
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// this function is called when the cursor is at the '.' and you need to get the
// declaration before that dot. Returns the resolved declaration (nil when
// deduction fails) together with the parsed expression, which callers may
// use for unimported-package fallback.
func (c *auto_complete_context) deduce_cursor_decl(iter *token_iterator) (*decl, ast.Expr) {
	expr, err := parser.ParseExpr(iter.extract_go_expr())
	if err != nil {
		return nil, nil
	}
	return expr_to_decl(expr, c.current.scope), expr
}
|
||||
|
||||
// try to find and extract the surrounding struct literal type. Returns nil
// unless the extracted expression resolves to a declaration whose type is
// a struct type.
func (c *auto_complete_context) deduce_struct_type_decl(iter *token_iterator) *decl {
	typ := iter.extract_struct_type()
	if typ == "" {
		return nil
	}

	expr, err := parser.ParseExpr(typ)
	if err != nil {
		return nil
	}
	decl := type_to_decl(expr, c.current.scope)
	if decl == nil {
		return nil
	}
	// only struct literals get field-name filtering
	if _, ok := decl.typ.(*ast.StructType); !ok {
		return nil
	}
	return decl
}
|
||||
|
||||
// Entry point from autocompletion, the function looks at text before the cursor
// and figures out the declaration the cursor is on. This declaration is
// used in filtering the resulting set of autocompletion suggestions.
// The bool result is false only when completion should be aborted entirely.
func (c *auto_complete_context) deduce_cursor_context(file []byte, cursor int) (cursor_context, bool) {
	if cursor <= 0 {
		return cursor_context{}, true
	}

	iter := new_token_iterator(file, cursor)
	if len(iter.tokens) == 0 {
		return cursor_context{}, false
	}

	// figure out what is just before the cursor
	switch tok := iter.token(); tok.tok {
	case token.STRING:
		// make sure cursor is inside the string
		s := tok.literal()
		if len(s) > 1 && s[len(s)-1] == '"' && tok.off+len(s) <= cursor {
			return cursor_context{}, true
		}
		// now figure out if inside an import declaration: walk the token
		// stream backwards validating each (token, previous-token) pair
		// against the shapes an import block can take
		var ptok = token.STRING
		for iter.go_back() {
			itok := iter.token().tok
			switch itok {
			case token.STRING:
				switch ptok {
				case token.SEMICOLON, token.IDENT, token.PERIOD:
				default:
					return cursor_context{}, true
				}
			case token.LPAREN, token.SEMICOLON:
				switch ptok {
				case token.STRING, token.IDENT, token.PERIOD:
				default:
					return cursor_context{}, true
				}
			case token.IDENT, token.PERIOD:
				switch ptok {
				case token.STRING:
				default:
					return cursor_context{}, true
				}
			case token.IMPORT:
				switch ptok {
				case token.STRING, token.IDENT, token.PERIOD, token.LPAREN:
					// inside an import string: complete the path typed
					// so far (skipping the opening quote)
					path_len := cursor - tok.off
					path := s[1:path_len]
					return cursor_context{decl_import: true, partial: path}, true
				default:
					return cursor_context{}, true
				}
			default:
				return cursor_context{}, true
			}
			ptok = itok
		}
	case token.PERIOD:
		// we're '<whatever>.'
		// figure out decl, Partial is ""
		decl, expr := c.deduce_cursor_decl(&iter)
		return cursor_context{decl: decl, expr: expr}, decl != nil
	case token.IDENT, token.TYPE, token.CONST, token.VAR, token.FUNC, token.PACKAGE:
		// we're '<whatever>.<ident>'
		// parse <ident> as Partial and figure out decl
		var partial string
		if tok.tok == token.IDENT {
			// Calculate the offset of the cursor position within the identifier.
			// For instance, if we are 'ab#c', we want partial_len = 2 and partial = ab.
			partial_len := cursor - tok.off

			// If it happens that the cursor is past the end of the literal,
			// means there is a space between the literal and the cursor, think
			// of it as no context, because that's what it really is.
			if partial_len > len(tok.literal()) {
				return cursor_context{}, true
			}
			partial = tok.literal()[0:partial_len]
		} else {
			// Do not try to truncate if it is not an identifier.
			partial = tok.literal()
		}

		iter.go_back()
		switch iter.token().tok {
		case token.PERIOD:
			decl, expr := c.deduce_cursor_decl(&iter)
			return cursor_context{decl: decl, partial: partial, expr: expr}, decl != nil
		case token.COMMA, token.LBRACE:
			// This can happen for struct fields:
			// &Struct{Hello: 1, Wor#} // (# - the cursor)
			// Let's try to find the struct type
			decl := c.deduce_struct_type_decl(&iter)
			return cursor_context{
				decl:         decl,
				partial:      partial,
				struct_field: decl != nil,
			}, true
		default:
			return cursor_context{partial: partial}, true
		}
	case token.COMMA, token.LBRACE:
		// Try to parse the current expression as a structure initialization.
		decl := c.deduce_struct_type_decl(&iter)
		return cursor_context{
			decl:         decl,
			partial:      "",
			struct_field: decl != nil,
		}, true
	}

	return cursor_context{}, true
}
|
||||
|
||||
// Decl deduction failed, but we're on "<ident>.", this ident can be an
|
||||
// unexported package, let's try to match the ident against a set of known
|
||||
// packages and if it matches try to import it.
|
||||
// TODO: Right now I've made a static list of built-in packages, but in theory
|
||||
// we could scan all GOPATH packages as well. Now, don't forget that default
|
||||
// package name has nothing to do with package file name, that's why we need to
|
||||
// scan the packages. And many of them will have conflicts. Can we make a smart
|
||||
// prediction algorithm which will prefer certain packages over another ones?
|
||||
func resolveKnownPackageIdent(ident string, filename string, context *package_lookup_context) *decl {
|
||||
importPath, ok := knownPackageIdents[ident]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
path, ok := abs_path_for_package(filename, importPath, context)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
p := new_package_file_cache(path)
|
||||
p.update_cache()
|
||||
return p.main
|
||||
}
|
||||
|
||||
// knownPackageIdents maps a bare package identifier (as it would appear
// before a dot, e.g. "json") to the standard-library import path that
// provides it. Used by resolveKnownPackageIdent to auto-import packages
// the user has not imported yet. Where two stdlib packages share a name,
// one is chosen and the loser is kept below as a commented-out entry.
var knownPackageIdents = map[string]string{
	"adler32":         "hash/adler32",
	"aes":             "crypto/aes",
	"ascii85":         "encoding/ascii85",
	"asn1":            "encoding/asn1",
	"ast":             "go/ast",
	"atomic":          "sync/atomic",
	"base32":          "encoding/base32",
	"base64":          "encoding/base64",
	"big":             "math/big",
	"binary":          "encoding/binary",
	"bufio":           "bufio",
	"build":           "go/build",
	"bytes":           "bytes",
	"bzip2":           "compress/bzip2",
	"cgi":             "net/http/cgi",
	"cgo":             "runtime/cgo",
	"cipher":          "crypto/cipher",
	"cmplx":           "math/cmplx",
	"color":           "image/color",
	"constant":        "go/constant",
	"context":         "context",
	"cookiejar":       "net/http/cookiejar",
	"crc32":           "hash/crc32",
	"crc64":           "hash/crc64",
	"crypto":          "crypto",
	"csv":             "encoding/csv",
	"debug":           "runtime/debug",
	"des":             "crypto/des",
	"doc":             "go/doc",
	"draw":            "image/draw",
	"driver":          "database/sql/driver",
	"dsa":             "crypto/dsa",
	"dwarf":           "debug/dwarf",
	"ecdsa":           "crypto/ecdsa",
	"elf":             "debug/elf",
	"elliptic":        "crypto/elliptic",
	"encoding":        "encoding",
	"errors":          "errors",
	"exec":            "os/exec",
	"expvar":          "expvar",
	"fcgi":            "net/http/fcgi",
	"filepath":        "path/filepath",
	"flag":            "flag",
	"flate":           "compress/flate",
	"fmt":             "fmt",
	"fnv":             "hash/fnv",
	"format":          "go/format",
	"gif":             "image/gif",
	"gob":             "encoding/gob",
	"gosym":           "debug/gosym",
	"gzip":            "compress/gzip",
	"hash":            "hash",
	"heap":            "container/heap",
	"hex":             "encoding/hex",
	"hmac":            "crypto/hmac",
	"hpack":           "vendor/golang_org/x/net/http2/hpack",
	"html":            "html",
	"http":            "net/http",
	"httplex":         "vendor/golang_org/x/net/lex/httplex",
	"httptest":        "net/http/httptest",
	"httptrace":       "net/http/httptrace",
	"httputil":        "net/http/httputil",
	"image":           "image",
	"importer":        "go/importer",
	"io":              "io",
	"iotest":          "testing/iotest",
	"ioutil":          "io/ioutil",
	"jpeg":            "image/jpeg",
	"json":            "encoding/json",
	"jsonrpc":         "net/rpc/jsonrpc",
	"list":            "container/list",
	"log":             "log",
	"lzw":             "compress/lzw",
	"macho":           "debug/macho",
	"mail":            "net/mail",
	"math":            "math",
	"md5":             "crypto/md5",
	"mime":            "mime",
	"multipart":       "mime/multipart",
	"net":             "net",
	"os":              "os",
	"palette":         "image/color/palette",
	"parse":           "text/template/parse",
	"parser":          "go/parser",
	"path":            "path",
	"pe":              "debug/pe",
	"pem":             "encoding/pem",
	"pkix":            "crypto/x509/pkix",
	"plan9obj":        "debug/plan9obj",
	"png":             "image/png",
	"pprof":           "net/http/pprof",
	"printer":         "go/printer",
	"quick":           "testing/quick",
	"quotedprintable": "mime/quotedprintable",
	"race":            "runtime/race",
	"rand":            "math/rand",
	"rc4":             "crypto/rc4",
	"reflect":         "reflect",
	"regexp":          "regexp",
	"ring":            "container/ring",
	"rpc":             "net/rpc",
	"rsa":             "crypto/rsa",
	"runtime":         "runtime",
	"scanner":         "text/scanner",
	"sha1":            "crypto/sha1",
	"sha256":          "crypto/sha256",
	"sha512":          "crypto/sha512",
	"signal":          "os/signal",
	"smtp":            "net/smtp",
	"sort":            "sort",
	"sql":             "database/sql",
	"strconv":         "strconv",
	"strings":         "strings",
	"subtle":          "crypto/subtle",
	"suffixarray":     "index/suffixarray",
	"sync":            "sync",
	"syntax":          "regexp/syntax",
	"syscall":         "syscall",
	"syslog":          "log/syslog",
	"tabwriter":       "text/tabwriter",
	"tar":             "archive/tar",
	"template":        "html/template",
	"testing":         "testing",
	"textproto":       "net/textproto",
	"time":            "time",
	"tls":             "crypto/tls",
	"token":           "go/token",
	"trace":           "runtime/trace",
	"types":           "go/types",
	"unicode":         "unicode",
	"url":             "net/url",
	"user":            "os/user",
	"utf16":           "unicode/utf16",
	"utf8":            "unicode/utf8",
	"x509":            "crypto/x509",
	"xml":             "encoding/xml",
	"zip":             "archive/zip",
	"zlib":            "compress/zlib",
	//"scanner": "go/scanner", // DUP: prefer text/scanner
	//"template": "text/template", // DUP: prefer html/template
	//"pprof": "runtime/pprof", // DUP: prefer net/http/pprof
	//"rand": "crypto/rand", // DUP: prefer math/rand
}
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,518 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/build"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// []package_import
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// package_import describes one resolved import of a file: the local alias
// it is imported under ("" when the default package name is used) and the
// resolved path of the compiled package file.
type package_import struct {
	alias string
	path  string
}
|
||||
|
||||
// Parses import declarations until the first non-import declaration and fills
|
||||
// `packages` array with import information.
|
||||
func collect_package_imports(filename string, decls []ast.Decl, context *package_lookup_context) []package_import {
|
||||
pi := make([]package_import, 0, 16)
|
||||
for _, decl := range decls {
|
||||
if gd, ok := decl.(*ast.GenDecl); ok && gd.Tok == token.IMPORT {
|
||||
for _, spec := range gd.Specs {
|
||||
imp := spec.(*ast.ImportSpec)
|
||||
path, alias := path_and_alias(imp)
|
||||
path, ok := abs_path_for_package(filename, path, context)
|
||||
if ok && alias != "_" {
|
||||
pi = append(pi, package_import{alias, path})
|
||||
}
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
return pi
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// decl_file_cache
|
||||
//
|
||||
// Contains cache for top-level declarations of a file as well as its
|
||||
// contents, AST and import information.
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// decl_file_cache holds the parsed state of one source file: its top-level
// declarations, import information, anonymized file scope and the error
// (if any) from the last read/parse. Entries are refreshed lazily by
// update(), keyed off the file's modification time.
type decl_file_cache struct {
	name  string // file name
	mtime int64  // last modification time (UnixNano) seen by update()

	decls     map[string]*decl // top-level declarations
	error     error            // last error
	packages  []package_import // import information
	filescope *scope           // file-level scope fed to anonymify_ast

	fset    *token.FileSet          // positions for the parsed AST
	context *package_lookup_context // how to resolve import paths
}
|
||||
|
||||
func new_decl_file_cache(name string, context *package_lookup_context) *decl_file_cache {
|
||||
return &decl_file_cache{
|
||||
name: name,
|
||||
context: context,
|
||||
}
|
||||
}
|
||||
|
||||
func (f *decl_file_cache) update() {
|
||||
stat, err := os.Stat(f.name)
|
||||
if err != nil {
|
||||
f.decls = nil
|
||||
f.error = err
|
||||
f.fset = nil
|
||||
return
|
||||
}
|
||||
|
||||
statmtime := stat.ModTime().UnixNano()
|
||||
if f.mtime == statmtime {
|
||||
return
|
||||
}
|
||||
|
||||
f.mtime = statmtime
|
||||
f.read_file()
|
||||
}
|
||||
|
||||
func (f *decl_file_cache) read_file() {
|
||||
var data []byte
|
||||
data, f.error = file_reader.read_file(f.name)
|
||||
if f.error != nil {
|
||||
return
|
||||
}
|
||||
data, _ = filter_out_shebang(data)
|
||||
|
||||
f.process_data(data)
|
||||
}
|
||||
|
||||
func (f *decl_file_cache) process_data(data []byte) {
|
||||
var file *ast.File
|
||||
f.fset = token.NewFileSet()
|
||||
file, f.error = parser.ParseFile(f.fset, "", data, 0)
|
||||
f.filescope = new_scope(nil)
|
||||
for _, d := range file.Decls {
|
||||
anonymify_ast(d, 0, f.filescope)
|
||||
}
|
||||
f.packages = collect_package_imports(f.name, file.Decls, f.context)
|
||||
f.decls = make(map[string]*decl, len(file.Decls))
|
||||
for _, decl := range file.Decls {
|
||||
append_to_top_decls(f.decls, decl, f.filescope)
|
||||
}
|
||||
}
|
||||
|
||||
// append_to_top_decls merges one AST declaration into the `decls` map.
// Methods are attached as children of their receiver type's decl entry
// (creating a stub entry if the type has not been seen yet); everything
// else is inserted under its own name, merging with an existing entry
// when present.
func append_to_top_decls(decls map[string]*decl, decl ast.Decl, scope *scope) {
	foreach_decl(decl, func(data *foreach_decl_struct) {
		class := ast_decl_class(data.decl)
		for i, name := range data.names {
			typ, v, vi := data.type_value_index(i)

			d := new_decl_full(name.Name, class, 0, typ, v, vi, scope)
			if d == nil {
				return
			}

			methodof := method_of(decl)
			if methodof != "" {
				// This is a method: hang it off the receiver type's decl.
				decl, ok := decls[methodof]
				if ok {
					decl.add_child(d)
				} else {
					// Receiver type not seen yet; create a stub to hold
					// its methods until the type declaration arrives.
					decl = new_decl(methodof, decl_methods_stub, scope)
					decls[methodof] = decl
					decl.add_child(d)
				}
			} else {
				// Plain top-level declaration: merge into an existing entry
				// (e.g. a methods stub created above) or insert fresh.
				decl, ok := decls[d.name]
				if ok {
					decl.expand_or_replace(d)
				} else {
					decls[d.name] = d
				}
			}
		}
	})
}
|
||||
|
||||
func abs_path_for_package(filename, p string, context *package_lookup_context) (string, bool) {
|
||||
dir, _ := filepath.Split(filename)
|
||||
if len(p) == 0 {
|
||||
return "", false
|
||||
}
|
||||
if p[0] == '.' {
|
||||
return fmt.Sprintf("%s.a", filepath.Join(dir, p)), true
|
||||
}
|
||||
pkg, ok := find_go_dag_package(p, dir)
|
||||
if ok {
|
||||
return pkg, true
|
||||
}
|
||||
return find_global_file(p, context)
|
||||
}
|
||||
|
||||
func path_and_alias(imp *ast.ImportSpec) (string, string) {
|
||||
path := ""
|
||||
if imp.Path != nil && len(imp.Path.Value) > 0 {
|
||||
path = string(imp.Path.Value)
|
||||
path = path[1 : len(path)-1]
|
||||
}
|
||||
alias := ""
|
||||
if imp.Name != nil {
|
||||
alias = imp.Name.Name
|
||||
}
|
||||
return path, alias
|
||||
}
|
||||
|
||||
func find_go_dag_package(imp, filedir string) (string, bool) {
|
||||
// Support godag directory structure
|
||||
dir, pkg := filepath.Split(imp)
|
||||
godag_pkg := filepath.Join(filedir, "..", dir, "_obj", pkg+".a")
|
||||
if file_exists(godag_pkg) {
|
||||
return godag_pkg, true
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
// autobuild compares the mod time of the source files of the package, and if any of them is newer
// than the package object file will rebuild it.
func autobuild(p *build.Package) error {
	if p.Dir == "" {
		return fmt.Errorf("no files to build")
	}
	ps, err := os.Stat(p.PkgObj)
	if err != nil {
		// Assume package file does not exist and build for the first time.
		return build_package(p)
	}
	pt := ps.ModTime()
	fs, err := readdir_lstat(p.Dir)
	if err != nil {
		return err
	}
	for _, f := range fs {
		if f.IsDir() {
			// Subdirectories are separate packages; ignore them.
			continue
		}
		if f.ModTime().After(pt) {
			// Source file is newer than package file; rebuild.
			return build_package(p)
		}
	}
	return nil
}
|
||||
|
||||
// build_package builds the package by calling `go install package/import`. If everything compiles
|
||||
// correctly, the newly compiled package should then be in the usual place in the `$GOPATH/pkg`
|
||||
// directory, and gocode will pick it up from there.
|
||||
func build_package(p *build.Package) error {
|
||||
if *g_debug {
|
||||
log.Printf("-------------------")
|
||||
log.Printf("rebuilding package %s", p.Name)
|
||||
log.Printf("package import: %s", p.ImportPath)
|
||||
log.Printf("package object: %s", p.PkgObj)
|
||||
log.Printf("package source dir: %s", p.Dir)
|
||||
log.Printf("package source files: %v", p.GoFiles)
|
||||
log.Printf("GOPATH: %v", g_daemon.context.GOPATH)
|
||||
log.Printf("GOROOT: %v", g_daemon.context.GOROOT)
|
||||
}
|
||||
env := os.Environ()
|
||||
for i, v := range env {
|
||||
if strings.HasPrefix(v, "GOPATH=") {
|
||||
env[i] = "GOPATH=" + g_daemon.context.GOPATH
|
||||
} else if strings.HasPrefix(v, "GOROOT=") {
|
||||
env[i] = "GOROOT=" + g_daemon.context.GOROOT
|
||||
}
|
||||
}
|
||||
|
||||
cmd := exec.Command("go", "install", p.ImportPath)
|
||||
cmd.Env = env
|
||||
|
||||
// TODO: Should read STDERR rather than STDOUT.
|
||||
out, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if *g_debug {
|
||||
log.Printf("build out: %s\n", string(out))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// executes autobuild function if autobuild option is enabled, logs error and
|
||||
// ignores it
|
||||
func try_autobuild(p *build.Package) {
|
||||
if g_config.Autobuild {
|
||||
err := autobuild(p)
|
||||
if err != nil && *g_debug {
|
||||
log.Printf("Autobuild error: %s\n", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func log_found_package_maybe(imp, pkgpath string) {
|
||||
if *g_debug {
|
||||
log.Printf("Found %q at %q\n", imp, pkgpath)
|
||||
}
|
||||
}
|
||||
|
||||
// log_build_context dumps the package lookup context to the log; used as
// a debugging aid when an import path cannot be resolved.
func log_build_context(context *package_lookup_context) {
	log.Printf(" GOROOT: %s\n", context.GOROOT)
	log.Printf(" GOPATH: %s\n", context.GOPATH)
	log.Printf(" GOOS: %s\n", context.GOOS)
	log.Printf(" GOARCH: %s\n", context.GOARCH)
	log.Printf(" BzlProjectRoot: %q\n", context.BzlProjectRoot)
	log.Printf(" GBProjectRoot: %q\n", context.GBProjectRoot)
	log.Printf(" lib-path: %q\n", g_config.LibPath)
}
|
||||
|
||||
// find_global_file returns the file path of the compiled package corresponding to the specified
// import, and a boolean stating whether such path is valid.
// Lookup order: synthetic "unsafe", lib-path, gb mode, bzl mode, vendor
// directories walked up from the current package, then the regular
// go/build import.
// TODO: Return only one value, possibly empty string if not found.
func find_global_file(imp string, context *package_lookup_context) (string, bool) {
	// gocode synthetically generates the builtin package
	// "unsafe", since the "unsafe.a" package doesn't really exist.
	// Thus, when the user request for the package "unsafe" we
	// would return synthetic global file that would be used
	// just as a key name to find this synthetic package
	if imp == "unsafe" {
		return "unsafe", true
	}

	pkgfile := fmt.Sprintf("%s.a", imp)

	// if lib-path is defined, use it
	if g_config.LibPath != "" {
		for _, p := range filepath.SplitList(g_config.LibPath) {
			pkg_path := filepath.Join(p, pkgfile)
			if file_exists(pkg_path) {
				log_found_package_maybe(imp, pkg_path)
				return pkg_path, true
			}
			// Also check the relevant pkg/OS_ARCH dir for the libpath, if provided.
			pkgdir := fmt.Sprintf("%s_%s", context.GOOS, context.GOARCH)
			pkg_path = filepath.Join(p, "pkg", pkgdir, pkgfile)
			if file_exists(pkg_path) {
				log_found_package_maybe(imp, pkg_path)
				return pkg_path, true
			}
		}
	}

	// gb-specific lookup mode, only if the root dir was found
	if g_config.PackageLookupMode == "gb" && context.GBProjectRoot != "" {
		root := context.GBProjectRoot
		// gb uses "OS-ARCH" (dash), unlike the standard "OS_ARCH".
		pkg_path := filepath.Join(root, "pkg", context.GOOS+"-"+context.GOARCH, pkgfile)
		if file_exists(pkg_path) {
			log_found_package_maybe(imp, pkg_path)
			return pkg_path, true
		}
	}

	// bzl-specific lookup mode, only if the root dir was found
	if g_config.PackageLookupMode == "bzl" && context.BzlProjectRoot != "" {
		var root, impath string
		if strings.HasPrefix(imp, g_config.CustomPkgPrefix+"/") {
			root = filepath.Join(context.BzlProjectRoot, "bazel-bin")
			impath = imp[len(g_config.CustomPkgPrefix)+1:]
		} else if g_config.CustomVendorDir != "" {
			// Try custom vendor dir.
			root = filepath.Join(context.BzlProjectRoot, "bazel-bin", g_config.CustomVendorDir)
			impath = imp
		}

		if root != "" && impath != "" {
			// There might be more than one ".a" files in the pkg path with bazel.
			// But the best practice is to keep one go_library build target in each
			// pakcage directory so that it follows the standard Go package
			// structure. Thus here we assume there is at most one ".a" file existing
			// in the pkg path.
			if d, err := os.Open(filepath.Join(root, impath)); err == nil {
				defer d.Close()

				if fis, err := d.Readdir(-1); err == nil {
					for _, fi := range fis {
						if !fi.IsDir() && filepath.Ext(fi.Name()) == ".a" {
							pkg_path := filepath.Join(root, impath, fi.Name())
							log_found_package_maybe(imp, pkg_path)
							return pkg_path, true
						}
					}
				}
			}
		}
	}

	if context.CurrentPackagePath != "" {
		// Try vendor path first, see GO15VENDOREXPERIMENT.
		// We don't check this environment variable however, seems like there is
		// almost no harm in doing so (well.. if you experiment with vendoring,
		// gocode will fail after enabling/disabling the flag, and you'll be
		// forced to get rid of vendor binaries). But asking users to set this
		// env var is up will bring more trouble. Because we also need to pass
		// it from client to server, make sure their editors set it, etc.
		// So, whatever, let's just pretend it's always on.
		package_path := context.CurrentPackagePath
		for {
			// Check <pkg>/vendor/<imp> at each ancestor of the current package.
			limp := filepath.Join(package_path, "vendor", imp)
			if p, err := context.Import(limp, "", build.AllowBinary|build.FindOnly); err == nil {
				try_autobuild(p)
				if file_exists(p.PkgObj) {
					log_found_package_maybe(imp, p.PkgObj)
					return p.PkgObj, true
				}
			}
			if package_path == "" {
				break
			}
			next_path := filepath.Dir(package_path)
			// let's protect ourselves from inf recursion here
			if next_path == package_path {
				break
			}
			package_path = next_path
		}
	}

	// Finally, the regular go/build lookup.
	if p, err := context.Import(imp, "", build.AllowBinary|build.FindOnly); err == nil {
		try_autobuild(p)
		if file_exists(p.PkgObj) {
			log_found_package_maybe(imp, p.PkgObj)
			return p.PkgObj, true
		}
	}

	if *g_debug {
		log.Printf("Import path %q was not resolved\n", imp)
		log.Println("Gocode's build context is:")
		log_build_context(context)
	}
	return "", false
}
|
||||
|
||||
func package_name(file *ast.File) string {
|
||||
if file.Name != nil {
|
||||
return file.Name.Name
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// decl_cache
|
||||
//
|
||||
// Thread-safe collection of DeclFileCache entities.
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// package_lookup_context extends the standard go/build context with the
// project roots used by the "bzl" and "gb" lookup modes and with the
// import path of the package under completion (used for vendor lookup).
type package_lookup_context struct {
	build.Context
	BzlProjectRoot     string // bazel workspace root; "" when not in bzl mode
	GBProjectRoot      string // gb project root; "" when not in gb mode
	CurrentPackagePath string // import path of the package being edited
}
|
||||
|
||||
// gopath returns the list of Go path directories.
|
||||
func (ctxt *package_lookup_context) gopath() []string {
|
||||
var all []string
|
||||
for _, p := range filepath.SplitList(ctxt.GOPATH) {
|
||||
if p == "" || p == ctxt.GOROOT {
|
||||
// Empty paths are uninteresting.
|
||||
// If the path is the GOROOT, ignore it.
|
||||
// People sometimes set GOPATH=$GOROOT.
|
||||
// Do not get confused by this common mistake.
|
||||
continue
|
||||
}
|
||||
if strings.HasPrefix(p, "~") {
|
||||
// Path segments starting with ~ on Unix are almost always
|
||||
// users who have incorrectly quoted ~ while setting GOPATH,
|
||||
// preventing it from expanding to $HOME.
|
||||
// The situation is made more confusing by the fact that
|
||||
// bash allows quoted ~ in $PATH (most shells do not).
|
||||
// Do not get confused by this, and do not try to use the path.
|
||||
// It does not exist, and printing errors about it confuses
|
||||
// those users even more, because they think "sure ~ exists!".
|
||||
// The go command diagnoses this situation and prints a
|
||||
// useful error.
|
||||
// On Windows, ~ is used in short names, such as c:\progra~1
|
||||
// for c:\program files.
|
||||
continue
|
||||
}
|
||||
all = append(all, p)
|
||||
}
|
||||
return all
|
||||
}
|
||||
|
||||
// pkg_dirs returns the directories that may contain compiled package
// files (.a): GOROOT's pkg dir plus, depending on the configured
// PackageLookupMode, the GOPATH or gb project pkg dirs.
func (ctxt *package_lookup_context) pkg_dirs() []string {
	pkgdir := fmt.Sprintf("%s_%s", ctxt.GOOS, ctxt.GOARCH)

	var all []string
	if ctxt.GOROOT != "" {
		dir := filepath.Join(ctxt.GOROOT, "pkg", pkgdir)
		if is_dir(dir) {
			all = append(all, dir)
		}
	}

	switch g_config.PackageLookupMode {
	case "go":
		for _, p := range ctxt.gopath() {
			dir := filepath.Join(p, "pkg", pkgdir)
			if is_dir(dir) {
				all = append(all, dir)
			}
		}
	case "gb":
		if ctxt.GBProjectRoot != "" {
			// gb uses "OS-ARCH" (dash) rather than the standard "OS_ARCH".
			pkgdir := fmt.Sprintf("%s-%s", ctxt.GOOS, ctxt.GOARCH)
			dir := filepath.Join(ctxt.GBProjectRoot, "pkg", pkgdir)
			if is_dir(dir) {
				all = append(all, dir)
			}
		}
	case "bzl":
		// TODO: Support bazel mode
	}
	return all
}
|
||||
|
||||
// decl_cache is a thread-safe collection of decl_file_cache entries,
// keyed by file name. The embedded mutex guards `cache`.
type decl_cache struct {
	cache   map[string]*decl_file_cache
	context *package_lookup_context
	sync.Mutex
}
|
||||
|
||||
func new_decl_cache(context *package_lookup_context) *decl_cache {
|
||||
return &decl_cache{
|
||||
cache: make(map[string]*decl_file_cache),
|
||||
context: context,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *decl_cache) get(filename string) *decl_file_cache {
|
||||
c.Lock()
|
||||
defer c.Unlock()
|
||||
|
||||
f, ok := c.cache[filename]
|
||||
if !ok {
|
||||
f = new_decl_file_cache(filename, c.context)
|
||||
c.cache[filename] = f
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
func (c *decl_cache) get_and_update(filename string) *decl_file_cache {
|
||||
f := c.get(filename)
|
||||
f.update()
|
||||
return f
|
||||
}
|
|
@ -0,0 +1,172 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// formatter interfaces
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// formatter renders a list of completion candidates to stdout in an
// editor-specific format. `num` is an editor-specific number that some
// formats (vim, json, godit) echo back in their output.
type formatter interface {
	write_candidates(candidates []candidate, num int)
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// nice_formatter (just for testing, simple textual output)
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
type nice_formatter struct{}
|
||||
|
||||
func (*nice_formatter) write_candidates(candidates []candidate, num int) {
|
||||
if candidates == nil {
|
||||
fmt.Printf("Nothing to complete.\n")
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("Found %d candidates:\n", len(candidates))
|
||||
for _, c := range candidates {
|
||||
abbr := fmt.Sprintf("%s %s %s", c.Class, c.Name, c.Type)
|
||||
if c.Class == decl_func {
|
||||
abbr = fmt.Sprintf("%s %s%s", c.Class, c.Name, c.Type[len("func"):])
|
||||
}
|
||||
fmt.Printf(" %s\n", abbr)
|
||||
}
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// vim_formatter
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
type vim_formatter struct{}
|
||||
|
||||
func (*vim_formatter) write_candidates(candidates []candidate, num int) {
|
||||
if candidates == nil {
|
||||
fmt.Print("[0, []]")
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("[%d, [", num)
|
||||
for i, c := range candidates {
|
||||
if i != 0 {
|
||||
fmt.Printf(", ")
|
||||
}
|
||||
|
||||
word := c.Name
|
||||
if c.Class == decl_func {
|
||||
word += "("
|
||||
if strings.HasPrefix(c.Type, "func()") {
|
||||
word += ")"
|
||||
}
|
||||
}
|
||||
|
||||
abbr := fmt.Sprintf("%s %s %s", c.Class, c.Name, c.Type)
|
||||
if c.Class == decl_func {
|
||||
abbr = fmt.Sprintf("%s %s%s", c.Class, c.Name, c.Type[len("func"):])
|
||||
}
|
||||
fmt.Printf("{'word': '%s', 'abbr': '%s', 'info': '%s'}", word, abbr, abbr)
|
||||
}
|
||||
fmt.Printf("]]")
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// godit_formatter
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
type godit_formatter struct{}
|
||||
|
||||
func (*godit_formatter) write_candidates(candidates []candidate, num int) {
|
||||
fmt.Printf("%d,,%d\n", num, len(candidates))
|
||||
for _, c := range candidates {
|
||||
contents := c.Name
|
||||
if c.Class == decl_func {
|
||||
contents += "("
|
||||
if strings.HasPrefix(c.Type, "func()") {
|
||||
contents += ")"
|
||||
}
|
||||
}
|
||||
|
||||
display := fmt.Sprintf("%s %s %s", c.Class, c.Name, c.Type)
|
||||
if c.Class == decl_func {
|
||||
display = fmt.Sprintf("%s %s%s", c.Class, c.Name, c.Type[len("func"):])
|
||||
}
|
||||
fmt.Printf("%s,,%s\n", display, contents)
|
||||
}
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// emacs_formatter
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
type emacs_formatter struct{}
|
||||
|
||||
func (*emacs_formatter) write_candidates(candidates []candidate, num int) {
|
||||
for _, c := range candidates {
|
||||
var hint string
|
||||
switch {
|
||||
case c.Class == decl_func:
|
||||
hint = c.Type
|
||||
case c.Type == "":
|
||||
hint = c.Class.String()
|
||||
default:
|
||||
hint = c.Class.String() + " " + c.Type
|
||||
}
|
||||
fmt.Printf("%s,,%s\n", c.Name, hint)
|
||||
}
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// csv_formatter
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
type csv_formatter struct{}
|
||||
|
||||
func (*csv_formatter) write_candidates(candidates []candidate, num int) {
|
||||
for _, c := range candidates {
|
||||
fmt.Printf("%s,,%s,,%s\n", c.Class, c.Name, c.Type)
|
||||
}
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// json_formatter
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
type json_formatter struct{}
|
||||
|
||||
func (*json_formatter) write_candidates(candidates []candidate, num int) {
|
||||
if candidates == nil {
|
||||
fmt.Print("[]")
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf(`[%d, [`, num)
|
||||
for i, c := range candidates {
|
||||
if i != 0 {
|
||||
fmt.Printf(", ")
|
||||
}
|
||||
fmt.Printf(`{"class": "%s", "name": "%s", "type": "%s"}`,
|
||||
c.Class, c.Name, c.Type)
|
||||
}
|
||||
fmt.Print("]]")
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
func get_formatter(name string) formatter {
|
||||
switch name {
|
||||
case "vim":
|
||||
return new(vim_formatter)
|
||||
case "emacs":
|
||||
return new(emacs_formatter)
|
||||
case "nice":
|
||||
return new(nice_formatter)
|
||||
case "csv":
|
||||
return new(csv_formatter)
|
||||
case "json":
|
||||
return new(json_formatter)
|
||||
case "godit":
|
||||
return new(godit_formatter)
|
||||
}
|
||||
return new(nice_formatter)
|
||||
}
|
|
@ -0,0 +1,72 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
_ "net/http/pprof"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// Command-line flags shared by the client and server modes.
var (
	g_is_server = flag.Bool("s", false, "run a server instead of a client")
	g_format    = flag.String("f", "nice", "output format (vim | emacs | nice | csv | json)")
	g_input     = flag.String("in", "", "use this file instead of stdin input")
	g_sock      = create_sock_flag("sock", "socket type (unix | tcp)")
	g_addr      = flag.String("addr", "127.0.0.1:37373", "address for tcp socket")
	g_debug     = flag.Bool("debug", false, "enable server-side debug mode")
	g_profile   = flag.Int("profile", 0, "port on which to expose profiling information for pprof; 0 to disable profiling")
)
|
||||
|
||||
func get_socket_filename() string {
|
||||
user := os.Getenv("USER")
|
||||
if user == "" {
|
||||
user = "all"
|
||||
}
|
||||
return filepath.Join(os.TempDir(), fmt.Sprintf("gocode-daemon.%s", user))
|
||||
}
|
||||
|
||||
// show_usage prints the command-line help (usage line, flags and
// subcommands) to stderr; installed as flag.Usage in main.
func show_usage() {
	w := os.Stderr
	fmt.Fprintf(w,
		"Usage: %s [-s] [-f=<format>] [-in=<path>] [-sock=<type>] [-addr=<addr>]\n"+
			" <command> [<args>]\n\n",
		os.Args[0])
	fmt.Fprintf(w, "Flags:\n")
	flag.PrintDefaults()
	fmt.Fprintf(w,
		"\nCommands:\n"+
			" autocomplete [<path>] <offset> main autocompletion command\n"+
			" close close the gocode daemon\n"+
			" status gocode daemon status report\n"+
			" drop-cache drop gocode daemon's cache\n"+
			" set [<name> [<value>]] list or set config options\n")
}
|
||||
|
||||
// main dispatches between daemon mode (-s) and client mode, then exits
// with the status code returned by the selected mode.
func main() {
	flag.Usage = show_usage
	flag.Parse()

	var retval int
	if *g_is_server {
		// Optionally expose net/http/pprof endpoints; the goroutine is a
		// no-op unless -profile is set to a positive port number.
		go func() {
			if *g_profile <= 0 {
				return
			}
			addr := fmt.Sprintf("localhost:%d", *g_profile)
			// Use the following commands to profile the binary (replace
			// 6060 with the port given via -profile):
			// go tool pprof http://localhost:6060/debug/pprof/profile # 30-second CPU profile
			// go tool pprof http://localhost:6060/debug/pprof/heap # heap profile
			// go tool pprof http://localhost:6060/debug/pprof/block # goroutine blocking profile
			// See http://blog.golang.org/profiling-go-programs for more info.
			log.Printf("enabling profiler on %s", addr)
			log.Print(http.ListenAndServe(addr, nil))
		}()
		retval = do_server()
	} else {
		retval = do_client()
	}
	os.Exit(retval)
}
|
|
@ -0,0 +1,48 @@
|
|||
// +build !windows
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// create_sock_flag registers a string flag selecting the client/server
// socket type. On POSIX systems the default is a unix domain socket.
func create_sock_flag(name, desc string) *string {
	const defaultSockType = "unix"
	return flag.String(name, defaultSockType, desc)
}
|
||||
|
||||
// Full path of the current executable.
//
// Resolution order: /proc/self/exe (Linux), then argv[0] made absolute
// against the current directory, then a PATH lookup for "gocode".
// Returns "" when every strategy fails.
func get_executable_filename() string {
	// try readlink first
	path, err := os.Readlink("/proc/self/exe")
	if err == nil {
		return path
	}
	// use argv[0]
	path = os.Args[0]
	if !filepath.IsAbs(path) {
		// best effort; a Getwd failure simply yields a relative join
		cwd, _ := os.Getwd()
		path = filepath.Join(cwd, path)
	}
	if file_exists(path) {
		return path
	}
	// Fallback : use "gocode" and assume we are in the PATH...
	path, err = exec.LookPath("gocode")
	if err == nil {
		return path
	}
	return ""
}
|
||||
|
||||
// config location
|
||||
|
||||
// config_dir returns the directory holding gocode's configuration,
// located under the XDG config home directory.
func config_dir() string {
	return filepath.Join(xdg_home_dir(), "gocode")
}
|
||||
|
||||
// config_file returns the full path of gocode's JSON configuration file
// inside the XDG config home directory.
func config_file() string {
	return filepath.Join(xdg_home_dir(), "gocode", "config.json")
}
|
|
@ -0,0 +1,56 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"syscall"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// Lazily-loaded Windows system DLLs used by the helpers below.
var (
	shell32  = syscall.NewLazyDLL("shell32.dll")
	kernel32 = syscall.NewLazyDLL("kernel32.dll")
)

// Win32 entry points, resolved on first use.
var (
	proc_sh_get_folder_path   = shell32.NewProc("SHGetFolderPathW")
	proc_get_module_file_name = kernel32.NewProc("GetModuleFileNameW")
)
|
||||
|
||||
// create_sock_flag registers a string flag selecting the socket type.
// On Windows the default is "tcp" (no unix domain socket support here).
func create_sock_flag(name, desc string) *string {
	return flag.String(name, "tcp", desc)
}
|
||||
|
||||
// Full path of the current executable.
//
// Calls GetModuleFileNameW with a NULL module handle, which yields the
// path of the executable that started the current process.
func get_executable_filename() string {
	b := make([]uint16, syscall.MAX_PATH)
	ret, _, err := syscall.Syscall(proc_get_module_file_name.Addr(), 3,
		0, uintptr(unsafe.Pointer(&b[0])), uintptr(len(b)))
	if int(ret) == 0 {
		// a zero return means the call failed; err carries GetLastError
		panic(fmt.Sprintf("GetModuleFileNameW : err %d", int(err)))
	}
	return syscall.UTF16ToString(b)
}
|
||||
|
||||
const (
	// CSIDL value for the roaming Application Data folder,
	// passed to SHGetFolderPathW below.
	csidl_appdata = 0x1a
)
|
||||
|
||||
// get_appdata_folder_path returns the user's roaming application-data
// directory via SHGetFolderPathW with CSIDL_APPDATA.
func get_appdata_folder_path() string {
	b := make([]uint16, syscall.MAX_PATH)
	ret, _, err := syscall.Syscall6(proc_sh_get_folder_path.Addr(), 5,
		0, csidl_appdata, 0, 0, uintptr(unsafe.Pointer(&b[0])), 0)
	if int(ret) != 0 {
		// SHGetFolderPathW returns S_OK (0) on success
		panic(fmt.Sprintf("SHGetFolderPathW : err %d", int(err)))
	}
	return syscall.UTF16ToString(b)
}
|
||||
|
||||
// config_dir returns the directory holding gocode's configuration,
// located under the user's roaming application-data folder.
func config_dir() string {
	return filepath.Join(get_appdata_folder_path(), "gocode")
}
|
||||
|
||||
// config_file returns the full path of gocode's JSON configuration file
// inside the user's roaming application-data folder.
func config_file() string {
	return filepath.Join(get_appdata_folder_path(), "gocode", "config.json")
}
|
|
@ -0,0 +1,256 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// package_parser abstracts over the two export-data formats (textual
// gc_parser and binary gc_bin_parser). parse_export invokes callback once
// per exported declaration, together with the package it belongs to.
type package_parser interface {
	parse_export(callback func(pkg string, decl ast.Decl))
}
|
||||
|
||||
//-------------------------------------------------------------------------
// package_file_cache
//
// Structure that represents a cache for an imported package. In other words
// these are the contents of an archive (*.a) file.
//-------------------------------------------------------------------------

type package_file_cache struct {
	name     string // file name
	mtime    int64  // archive mod time at last parse; -1 marks a "forever" entry that is never refreshed
	defalias string // default alias: the package name from its package clause

	scope  *scope           // scope holding all declarations visible from this package
	main   *decl            // package declaration
	others map[string]*decl // declarations of referenced packages, keyed by qualified name
}
|
||||
|
||||
func new_package_file_cache(name string) *package_file_cache {
|
||||
m := new(package_file_cache)
|
||||
m.name = name
|
||||
m.mtime = 0
|
||||
m.defalias = ""
|
||||
return m
|
||||
}
|
||||
|
||||
// Creates a cache that stays in cache forever. Useful for built-in packages.
|
||||
func new_package_file_cache_forever(name, defalias string) *package_file_cache {
|
||||
m := new(package_file_cache)
|
||||
m.name = name
|
||||
m.mtime = -1
|
||||
m.defalias = defalias
|
||||
return m
|
||||
}
|
||||
|
||||
func (m *package_file_cache) find_file() string {
|
||||
if file_exists(m.name) {
|
||||
return m.name
|
||||
}
|
||||
|
||||
n := len(m.name)
|
||||
filename := m.name[:n-1] + "6"
|
||||
if file_exists(filename) {
|
||||
return filename
|
||||
}
|
||||
|
||||
filename = m.name[:n-1] + "8"
|
||||
if file_exists(filename) {
|
||||
return filename
|
||||
}
|
||||
|
||||
filename = m.name[:n-1] + "5"
|
||||
if file_exists(filename) {
|
||||
return filename
|
||||
}
|
||||
return m.name
|
||||
}
|
||||
|
||||
// update_cache re-reads and re-parses the package file if its modification
// time changed since the last load. "Forever" entries (mtime == -1) are
// never refreshed. Stat/read errors are silently ignored, keeping whatever
// data was parsed previously.
func (m *package_file_cache) update_cache() {
	if m.mtime == -1 {
		return
	}
	fname := m.find_file()
	stat, err := os.Stat(fname)
	if err != nil {
		return
	}

	statmtime := stat.ModTime().UnixNano()
	if m.mtime != statmtime {
		m.mtime = statmtime

		data, err := file_reader.read_file(fname)
		if err != nil {
			return
		}
		m.process_package_data(data)
	}
}
|
||||
|
||||
// process_package_data parses raw export data (binary 'B' format or the
// older textual gc format) and populates m.scope, m.main and m.others with
// the exported declarations.
func (m *package_file_cache) process_package_data(data []byte) {
	m.scope = new_scope(g_universe_scope)

	// find import section
	i := bytes.Index(data, []byte{'\n', '$', '$'})
	if i == -1 {
		panic(fmt.Sprintf("Can't find the import section in the package file %s", m.name))
	}
	data = data[i+len("\n$$"):]

	// main package
	m.main = new_decl(m.name, decl_package, nil)
	// create map for other packages
	m.others = make(map[string]*decl)

	// pick the parser implementation matching the export-data format
	var pp package_parser
	if data[0] == 'B' {
		// binary format, skip 'B\n'
		data = data[2:]
		var p gc_bin_parser
		p.init(data, m)
		pp = &p
	} else {
		// textual format, find the beginning of the package clause
		i = bytes.Index(data, []byte{'p', 'a', 'c', 'k', 'a', 'g', 'e'})
		if i == -1 {
			panic("Can't find the package clause")
		}
		data = data[i:]

		var p gc_parser
		p.init(data, m)
		pp = &p
	}

	// route each exported declaration either to the main package or to the
	// package named by its qualified prefix ("#..." means main)
	pp.parse_export(func(pkg string, decl ast.Decl) {
		anonymify_ast(decl, decl_foreign, m.scope)
		if pkg == "" || strings.HasPrefix(pkg, "#") {
			// main package
			add_ast_decl_to_package(m.main, decl, m.scope)
		} else {
			// others
			if _, ok := m.others[pkg]; !ok {
				m.others[pkg] = new_decl(pkg, decl_package, nil)
			}
			add_ast_decl_to_package(m.others[pkg], decl, m.scope)
		}
	})

	// hack, add ourselves to the package scope
	mainName := "#" + m.defalias
	m.add_package_to_scope(mainName, m.name)

	// replace dummy package decls in package scope to actual packages
	// (keys starting with "#" or "!" are package placeholders)
	for key := range m.scope.entities {
		if !strings.HasPrefix(key, "#") && !strings.HasPrefix(key, "!") {
			continue
		}
		pkg, ok := m.others[key]
		if !ok && key == mainName {
			pkg = m.main
		}
		m.scope.replace_decl(key, pkg)
	}
}
|
||||
|
||||
func (m *package_file_cache) add_package_to_scope(alias, realname string) {
|
||||
d := new_decl(realname, decl_package, nil)
|
||||
m.scope.add_decl(alias, d)
|
||||
}
|
||||
|
||||
// add_ast_decl_to_package converts one exported AST declaration into decls
// attached to pkg. Methods are grouped under their receiver's decl
// (creating a methods stub when the type has not been seen yet). Unexported
// names are dropped unless they are type declarations.
func add_ast_decl_to_package(pkg *decl, decl ast.Decl, scope *scope) {
	foreach_decl(decl, func(data *foreach_decl_struct) {
		class := ast_decl_class(data.decl)
		for i, name := range data.names {
			typ, v, vi := data.type_value_index(i)

			d := new_decl_full(name.Name, class, decl_foreign, typ, v, vi, scope)
			if d == nil {
				return
			}

			// keep unexported types, drop all other unexported names
			// (presumably the types may still be referenced by exported
			// signatures — TODO confirm rationale)
			if !name.IsExported() && d.class != decl_type {
				return
			}

			methodof := method_of(data.decl)
			if methodof != "" {
				// a method: attach it to its receiver's decl
				decl := pkg.find_child(methodof)
				if decl != nil {
					decl.add_child(d)
				} else {
					// receiver type not seen yet: create a stub for it
					decl = new_decl(methodof, decl_methods_stub, scope)
					decl.add_child(d)
					pkg.add_child(decl)
				}
			} else {
				// a plain declaration: merge with an existing one if any
				decl := pkg.find_child(d.name)
				if decl != nil {
					decl.expand_or_replace(d)
				} else {
					pkg.add_child(d)
				}
			}
		}
	})
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// package_cache
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
type package_cache map[string]*package_file_cache
|
||||
|
||||
func new_package_cache() package_cache {
|
||||
m := make(package_cache)
|
||||
|
||||
// add built-in "unsafe" package
|
||||
m.add_builtin_unsafe_package()
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
// Function fills 'ps' set with packages from 'pkgs' import information.
// In case if package is not in the cache, it creates one and adds one to the cache.
func (c package_cache) append_packages(ps map[string]*package_file_cache, pkgs []package_import) {
	for _, m := range pkgs {
		if _, ok := ps[m.path]; ok {
			// already collected for this request
			continue
		}

		if mod, ok := c[m.path]; ok {
			ps[m.path] = mod
		} else {
			// first time we see this import path: create a cache entry
			mod = new_package_file_cache(m.path)
			ps[m.path] = mod
			c[m.path] = mod
		}
	}
}
|
||||
|
||||
var g_builtin_unsafe_package = []byte(`
|
||||
import
|
||||
$$
|
||||
package unsafe
|
||||
type @"".Pointer uintptr
|
||||
func @"".Offsetof (? any) uintptr
|
||||
func @"".Sizeof (? any) uintptr
|
||||
func @"".Alignof (? any) uintptr
|
||||
func @"".Typeof (i interface { }) interface { }
|
||||
func @"".Reflect (i interface { }) (typ interface { }, addr @"".Pointer)
|
||||
func @"".Unreflect (typ interface { }, addr @"".Pointer) interface { }
|
||||
func @"".New (typ interface { }) @"".Pointer
|
||||
func @"".NewArray (typ interface { }, n int) @"".Pointer
|
||||
|
||||
$$
|
||||
`)
|
||||
|
||||
// add_builtin_unsafe_package registers the "unsafe" pseudo-package, parsed
// from the hard-coded g_builtin_unsafe_package export data, as a
// never-expiring cache entry.
func (c package_cache) add_builtin_unsafe_package() {
	pkg := new_package_file_cache_forever("unsafe", "unsafe")
	pkg.process_package_data(g_builtin_unsafe_package)
	c["unsafe"] = pkg
}
|
|
@ -0,0 +1,762 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// gc_bin_parser
|
||||
//
|
||||
// The following part of the code may contain portions of the code from the Go
|
||||
// standard library, which tells me to retain their copyright notice:
|
||||
//
|
||||
// Copyright (c) 2012 The Go Authors. All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// gc_bin_parser decodes the binary export-data format produced by the gc
// compiler; it implements package_parser.
type gc_bin_parser struct {
	data    []byte
	buf     []byte // for reading strings
	version int    // export format version; -1 until the header is read

	// object lists
	strList       []string   // in order of appearance
	pkgList       []string   // in order of appearance
	typList       []ast.Expr // in order of appearance
	callback      func(pkg string, decl ast.Decl)
	pfc           *package_file_cache
	trackAllTypes bool

	// position encoding
	posInfoFormat bool
	prevFile      string
	prevLine      int

	// debugging support
	debugFormat bool
	read        int // bytes read
}
|
||||
|
||||
// init prepares the parser to decode 'data' on behalf of the package
// cache entry 'pfc'.
func (p *gc_bin_parser) init(data []byte, pfc *package_file_cache) {
	p.data = data
	p.version = -1           // unknown version
	p.strList = []string{""} // empty string is mapped to 0
	p.pfc = pfc
}
|
||||
|
||||
// parse_export decodes the whole export-data blob: version header, package
// clause, and the phase-1 objects (consts, types, vars, funcs), invoking
// callback for each exported declaration.
func (p *gc_bin_parser) parse_export(callback func(string, ast.Decl)) {
	p.callback = callback

	// read version info
	var versionstr string
	if b := p.rawByte(); b == 'c' || b == 'd' {
		// Go1.7 encoding; first byte encodes low-level
		// encoding format (compact vs debug).
		// For backward-compatibility only (avoid problems with
		// old installed packages). Newly compiled packages use
		// the extensible format string.
		// TODO(gri) Remove this support eventually; after Go1.8.
		if b == 'd' {
			p.debugFormat = true
		}
		p.trackAllTypes = p.rawByte() == 'a'
		p.posInfoFormat = p.int() != 0
		versionstr = p.string()
		if versionstr == "v1" {
			p.version = 0
		}
	} else {
		// Go1.8 extensible encoding
		// read version string and extract version number (ignore anything after the version number)
		versionstr = p.rawStringln(b)
		if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" {
			if v, err := strconv.Atoi(s[1]); err == nil && v > 0 {
				p.version = v
			}
		}
	}

	// read version specific flags - extend as necessary
	switch p.version {
	// case 4:
	// 	...
	//	fallthrough
	case 3, 2, 1:
		// Support for Go 1.8 type aliases will be added very
		// soon (Oct 2016). In the meantime, we make a
		// best-effort attempt to read v3 export data, failing
		// if we encounter a type alias. This allows the
		// automated builders to make progress since
		// type aliases are not yet used in practice.
		// TODO(gri): add support for type aliases.
		p.debugFormat = p.rawStringln(p.rawByte()) == "debug"
		p.trackAllTypes = p.int() != 0
		p.posInfoFormat = p.int() != 0
	case 0:
		// Go1.7 encoding format - nothing to do here
	default:
		panic(fmt.Errorf("unknown export format version %d (%q)", p.version, versionstr))
	}

	// --- generic export data ---

	// populate typList with predeclared "known" types
	p.typList = append(p.typList, predeclared...)

	// read package data; the "#" prefix of the qualified name is stripped
	p.pfc.defalias = p.pkg()[1:]

	// read objects of phase 1 only (see cmd/compiler/internal/gc/bexport.go)
	objcount := 0
	for {
		tag := p.tagOrIndex()
		if tag == endTag {
			break
		}
		p.obj(tag)
		objcount++
	}

	// self-verification
	if count := p.int(); count != objcount {
		panic(fmt.Sprintf("got %d objects; want %d", objcount, count))
	}
}
|
||||
|
||||
// pkg reads a package reference: either an index into pkgList for an
// already-seen package, or full package data (name + path). New packages
// are registered in the cache's scope and returned as a qualified name:
// "#name" for the package being imported, "!path!name" for dependencies.
func (p *gc_bin_parser) pkg() string {
	// if the package was seen before, i is its index (>= 0)
	i := p.tagOrIndex()
	if i >= 0 {
		return p.pkgList[i]
	}

	// otherwise, i is the package tag (< 0)
	if i != packageTag {
		panic(fmt.Sprintf("unexpected package tag %d", i))
	}

	// read package data
	name := p.string()
	path := p.string()

	// we should never see an empty package name
	if name == "" {
		panic("empty package name in import")
	}

	// an empty path denotes the package we are currently importing;
	// it must be the first package we see
	if (path == "") != (len(p.pkgList) == 0) {
		panic(fmt.Sprintf("package path %q for pkg index %d", path, len(p.pkgList)))
	}

	var fullName string
	if path != "" {
		fullName = "!" + path + "!" + name
		p.pfc.add_package_to_scope(fullName, path)
	} else {
		fullName = "#" + name
	}

	// if the package was imported before, use that one; otherwise create a new one
	p.pkgList = append(p.pkgList, fullName)
	return p.pkgList[len(p.pkgList)-1]
}
|
||||
|
||||
// obj decodes one top-level object (const, type, var or func) and reports
// it through p.callback as a synthesized AST declaration.
func (p *gc_bin_parser) obj(tag int) {
	switch tag {
	case constTag:
		p.pos()
		pkg, name := p.qualifiedName()
		typ := p.typ("")
		p.skipValue() // ignore const value, gocode's not interested
		p.callback(pkg, &ast.GenDecl{
			Tok: token.CONST,
			Specs: []ast.Spec{
				&ast.ValueSpec{
					Names:  []*ast.Ident{ast.NewIdent(name)},
					Type:   typ,
					Values: []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}},
				},
			},
		})
	case typeTag:
		// the type declaration is reported from inside typ() itself
		_ = p.typ("")

	case varTag:
		p.pos()
		pkg, name := p.qualifiedName()
		typ := p.typ("")
		p.callback(pkg, &ast.GenDecl{
			Tok: token.VAR,
			Specs: []ast.Spec{
				&ast.ValueSpec{
					Names: []*ast.Ident{ast.NewIdent(name)},
					Type:  typ,
				},
			},
		})
	case funcTag:
		p.pos()
		pkg, name := p.qualifiedName()
		params := p.paramList()
		results := p.paramList()
		p.callback(pkg, &ast.FuncDecl{
			Name: ast.NewIdent(name),
			Type: &ast.FuncType{Params: params, Results: results},
		})

	default:
		panic(fmt.Sprintf("unexpected object tag %d", tag))
	}
}
|
||||
|
||||
// pos decodes (and currently discards) delta-encoded position information,
// maintaining the previous file/line state. No-op unless posInfoFormat.
func (p *gc_bin_parser) pos() {
	if !p.posInfoFormat {
		return
	}

	file := p.prevFile
	line := p.prevLine
	if delta := p.int(); delta != 0 {
		// line changed
		line += delta
	} else if n := p.int(); n >= 0 {
		// file changed; the new name shares an n-byte prefix with the old
		file = p.prevFile[:n] + p.string()
		p.prevFile = file
		line = p.int()
	}
	p.prevLine = line

	// TODO(gri) register new position
}
|
||||
|
||||
func (p *gc_bin_parser) qualifiedName() (pkg string, name string) {
|
||||
name = p.string()
|
||||
pkg = p.pkg()
|
||||
return pkg, name
|
||||
}
|
||||
|
||||
func (p *gc_bin_parser) reserveMaybe() int {
|
||||
if p.trackAllTypes {
|
||||
p.typList = append(p.typList, nil)
|
||||
return len(p.typList) - 1
|
||||
} else {
|
||||
return -1
|
||||
}
|
||||
}
|
||||
|
||||
// recordMaybe stores t into the slot previously reserved by reserveMaybe
// (idx == -1 means nothing was reserved) and returns t for convenience.
func (p *gc_bin_parser) recordMaybe(idx int, t ast.Expr) ast.Expr {
	if idx == -1 {
		return t
	}
	p.typList[idx] = t
	return t
}
|
||||
|
||||
// record unconditionally appends t to the type list, making it addressable
// by index in subsequent type references.
func (p *gc_bin_parser) record(t ast.Expr) {
	p.typList = append(p.typList, t)
}
|
||||
|
||||
// parent is the package which declared the type; parent == nil means
// the package currently imported. The parent package is needed for
// exported struct fields and interface methods which don't contain
// explicit package information in the export data.
//
// typ decodes one type: either an index into typList for an already-seen
// type (i >= 0), or a tagged encoding (i < 0) dispatched below. The decode
// order within each case mirrors the exporter and must not be changed.
func (p *gc_bin_parser) typ(parent string) ast.Expr {
	// if the type was seen before, i is its index (>= 0)
	i := p.tagOrIndex()
	if i >= 0 {
		return p.typList[i]
	}

	// otherwise, i is the type tag (< 0)
	switch i {
	case namedTag:
		// read type object
		p.pos()
		parent, name := p.qualifiedName()
		tdecl := &ast.GenDecl{
			Tok: token.TYPE,
			Specs: []ast.Spec{
				&ast.TypeSpec{
					Name: ast.NewIdent(name),
				},
			},
		}

		// record it right away (underlying type can contain refs to t)
		t := &ast.SelectorExpr{X: ast.NewIdent(parent), Sel: ast.NewIdent(name)}
		p.record(t)

		// parse underlying type
		t0 := p.typ(parent)
		tdecl.Specs[0].(*ast.TypeSpec).Type = t0

		p.callback(parent, tdecl)

		// interfaces have no methods
		if _, ok := t0.(*ast.InterfaceType); ok {
			return t
		}

		// read associated methods
		for i := p.int(); i > 0; i-- {
			// TODO(gri) replace this with something closer to fieldName
			p.pos()
			name := p.string()
			if !exported(name) {
				p.pkg()
			}

			recv := p.paramList()
			params := p.paramList()
			results := p.paramList()
			p.int() // go:nointerface pragma - discarded

			strip_method_receiver(recv)
			p.callback(parent, &ast.FuncDecl{
				Recv: recv,
				Name: ast.NewIdent(name),
				Type: &ast.FuncType{Params: params, Results: results},
			})
		}
		return t
	case arrayTag:
		i := p.reserveMaybe()
		n := p.int64()
		elt := p.typ(parent)
		return p.recordMaybe(i, &ast.ArrayType{
			Len: &ast.BasicLit{Kind: token.INT, Value: fmt.Sprint(n)},
			Elt: elt,
		})

	case sliceTag:
		i := p.reserveMaybe()
		elt := p.typ(parent)
		return p.recordMaybe(i, &ast.ArrayType{Len: nil, Elt: elt})

	case dddTag:
		i := p.reserveMaybe()
		elt := p.typ(parent)
		return p.recordMaybe(i, &ast.Ellipsis{Elt: elt})

	case structTag:
		i := p.reserveMaybe()
		return p.recordMaybe(i, p.structType(parent))

	case pointerTag:
		i := p.reserveMaybe()
		elt := p.typ(parent)
		return p.recordMaybe(i, &ast.StarExpr{X: elt})

	case signatureTag:
		i := p.reserveMaybe()
		params := p.paramList()
		results := p.paramList()
		return p.recordMaybe(i, &ast.FuncType{Params: params, Results: results})

	case interfaceTag:
		i := p.reserveMaybe()
		if p.int() != 0 {
			panic("unexpected embedded interface")
		}
		methods := p.methodList(parent)
		return p.recordMaybe(i, &ast.InterfaceType{Methods: &ast.FieldList{List: methods}})

	case mapTag:
		i := p.reserveMaybe()
		key := p.typ(parent)
		val := p.typ(parent)
		return p.recordMaybe(i, &ast.MapType{Key: key, Value: val})

	case chanTag:
		i := p.reserveMaybe()
		dir := ast.SEND | ast.RECV
		switch d := p.int(); d {
		case 1:
			dir = ast.RECV
		case 2:
			dir = ast.SEND
		case 3:
			// already set
		default:
			panic(fmt.Sprintf("unexpected channel dir %d", d))
		}
		elt := p.typ(parent)
		return p.recordMaybe(i, &ast.ChanType{Dir: dir, Value: elt})

	default:
		panic(fmt.Sprintf("unexpected type tag %d", i))
	}
}
|
||||
|
||||
// structType decodes a struct type: a field count followed by that many
// (field, tag) pairs. Field tags are read but discarded.
func (p *gc_bin_parser) structType(parent string) *ast.StructType {
	var fields []*ast.Field
	if n := p.int(); n > 0 {
		fields = make([]*ast.Field, n)
		for i := range fields {
			fields[i] = p.field(parent)
			p.string() // tag, not interested in tags
		}
	}
	return &ast.StructType{Fields: &ast.FieldList{List: fields}}
}
|
||||
|
||||
// field decodes one struct field. An empty name (anonymous/embedded field)
// yields a Field with no Names, only a Type.
func (p *gc_bin_parser) field(parent string) *ast.Field {
	p.pos()
	_, name := p.fieldName(parent)
	typ := p.typ(parent)

	var names []*ast.Ident
	if name != "" {
		names = []*ast.Ident{ast.NewIdent(name)}
	}
	return &ast.Field{
		Names: names,
		Type:  typ,
	}
}
|
||||
|
||||
// methodList decodes an interface's method set: a count followed by that
// many method signatures. Returns nil for an empty set.
func (p *gc_bin_parser) methodList(parent string) (methods []*ast.Field) {
	if n := p.int(); n > 0 {
		methods = make([]*ast.Field, n)
		for i := range methods {
			methods[i] = p.method(parent)
		}
	}
	return
}
|
||||
|
||||
// method decodes a single interface method (name + signature) as an
// ast.Field, the representation ast.InterfaceType expects.
func (p *gc_bin_parser) method(parent string) *ast.Field {
	p.pos()
	_, name := p.fieldName(parent)
	params := p.paramList()
	results := p.paramList()
	return &ast.Field{
		Names: []*ast.Ident{ast.NewIdent(name)},
		Type:  &ast.FuncType{Params: params, Results: results},
	}
}
|
||||
|
||||
// fieldName decodes a struct-field or interface-method name and the package
// qualifying it (parent for exported names). A "?" name is translated to ""
// which callers treat as an anonymous (embedded) field.
func (p *gc_bin_parser) fieldName(parent string) (string, string) {
	name := p.string()
	pkg := parent
	if p.version == 0 && name == "_" {
		// versions < 1 don't export a package for _ fields
		// TODO: remove once versions are not supported anymore
		return pkg, name
	}
	if name != "" && !exported(name) {
		// explicitly qualified field
		if name == "?" {
			name = ""
		}
		pkg = p.pkg()
	}
	return pkg, name
}
|
||||
|
||||
// paramList decodes a parameter or result list. The encoded count's sign
// carries whether the parameters are named; zero means an absent list.
func (p *gc_bin_parser) paramList() *ast.FieldList {
	n := p.int()
	if n == 0 {
		return nil
	}
	// negative length indicates unnamed parameters
	named := true
	if n < 0 {
		n = -n
		named = false
	}
	// n > 0
	flds := make([]*ast.Field, n)
	for i := range flds {
		flds[i] = p.param(named)
	}
	return &ast.FieldList{List: flds}
}
|
||||
|
||||
// param decodes one parameter. Unnamed parameters get the placeholder
// name "?"; named non-blank parameters are followed by a package reference
// that must be consumed even though it is unused here.
func (p *gc_bin_parser) param(named bool) *ast.Field {
	t := p.typ("")

	name := "?"
	if named {
		name = p.string()
		if name == "" {
			panic("expected named parameter")
		}
		if name != "_" {
			p.pkg()
		}
		if i := strings.Index(name, "·"); i > 0 {
			name = name[:i] // cut off gc-specific parameter numbering
		}
	}

	// read and discard compiler-specific info
	p.string()

	return &ast.Field{
		Names: []*ast.Ident{ast.NewIdent(name)},
		Type:  t,
	}
}
|
||||
|
||||
// exported reports whether name is an exported Go identifier, i.e. whether
// its first rune is upper-case. The empty string is not exported
// (DecodeRuneInString yields RuneError, which is not upper-case).
func exported(name string) bool {
	first, _ := utf8.DecodeRuneInString(name)
	return unicode.IsUpper(first)
}
|
||||
|
||||
// skipValue consumes an encoded constant value of any kind without
// interpreting it (gocode does not need const values).
func (p *gc_bin_parser) skipValue() {
	switch tag := p.tagOrIndex(); tag {
	case falseTag, trueTag:
		// booleans are encoded entirely in the tag
	case int64Tag:
		p.int64()
	case floatTag:
		p.float()
	case complexTag:
		// real and imaginary parts
		p.float()
		p.float()
	case stringTag:
		p.string()
	default:
		panic(fmt.Sprintf("unexpected value tag %d", tag))
	}
}
|
||||
|
||||
// float consumes an encoded floating-point constant without interpreting
// it: sign, then (unless zero) exponent and mantissa.
func (p *gc_bin_parser) float() {
	sign := p.int()
	if sign == 0 {
		// zero value: nothing else encoded
		return
	}

	p.int()    // exp
	p.string() // mant
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Low-level decoders
|
||||
|
||||
// tagOrIndex reads a value that is either a negative tag constant or a
// non-negative index into one of the object lists.
func (p *gc_bin_parser) tagOrIndex() int {
	if p.debugFormat {
		p.marker('t')
	}

	return int(p.rawInt64())
}
|
||||
|
||||
// int reads an int64 and narrows it to int, panicking if the value does
// not fit (possible on 32-bit platforms).
func (p *gc_bin_parser) int() int {
	x := p.int64()
	if int64(int(x)) != x {
		panic("exported integer too large")
	}
	return int(x)
}
|
||||
|
||||
// int64 reads a varint-encoded signed integer, checking the debug marker
// when the stream carries them.
func (p *gc_bin_parser) int64() int64 {
	if p.debugFormat {
		p.marker('i')
	}

	return p.rawInt64()
}
|
||||
|
||||
// string reads an interned string: a non-negative value is an index into
// strList; a negative value is the length of a new string whose bytes
// follow inline and which is then added to strList.
func (p *gc_bin_parser) string() string {
	if p.debugFormat {
		p.marker('s')
	}
	// if the string was seen before, i is its index (>= 0)
	// (the empty string is at index 0)
	i := p.rawInt64()
	if i >= 0 {
		return p.strList[i]
	}
	// otherwise, i is the negative string length (< 0)
	if n := int(-i); n <= cap(p.buf) {
		p.buf = p.buf[:n]
	} else {
		p.buf = make([]byte, n)
	}
	for i := range p.buf {
		p.buf[i] = p.rawByte()
	}
	s := string(p.buf)
	p.strList = append(p.strList, s)
	return s
}
|
||||
|
||||
// marker consumes and verifies a debug marker byte followed by the
// expected stream position; present only when debugFormat is on.
func (p *gc_bin_parser) marker(want byte) {
	if got := p.rawByte(); got != want {
		panic(fmt.Sprintf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read))
	}

	pos := p.read
	if n := int(p.rawInt64()); n != pos {
		panic(fmt.Sprintf("incorrect position: got %d; want %d", n, pos))
	}
}
|
||||
|
||||
// rawInt64 should only be used by low-level decoders.
// It reads one varint via binary.ReadVarint, for which p itself serves as
// the io.ByteReader (see ReadByte below).
func (p *gc_bin_parser) rawInt64() int64 {
	i, err := binary.ReadVarint(p)
	if err != nil {
		panic(fmt.Sprintf("read error: %v", err))
	}
	return i
}
|
||||
|
||||
// rawStringln should only be used to read the initial version string.
// Starting with the already-consumed byte b, it accumulates bytes up to
// (but not including) the next newline.
func (p *gc_bin_parser) rawStringln(b byte) string {
	p.buf = p.buf[:0]
	for b != '\n' {
		p.buf = append(p.buf, b)
		b = p.rawByte()
	}
	return string(p.buf)
}
|
||||
|
||||
// needed for binary.ReadVarint in rawInt64
// (implements io.ByteReader; never returns an error — rawByte panics
// instead on malformed data)
func (p *gc_bin_parser) ReadByte() (byte, error) {
	return p.rawByte(), nil
}
|
||||
|
||||
// byte is the bottleneck interface for reading p.data.
// It unescapes '|' 'S' to '$' and '|' '|' to '|'.
// rawByte should only be used by low-level decoders.
func (p *gc_bin_parser) rawByte() byte {
	b := p.data[0]
	r := 1
	if b == '|' {
		// two-byte escape sequence
		b = p.data[1]
		r = 2
		switch b {
		case 'S':
			b = '$'
		case '|':
			// nothing to do
		default:
			panic("unexpected escape sequence in export data")
		}
	}
	// advance past the consumed bytes and track the running position
	p.data = p.data[r:]
	p.read += r
	return b
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Export format
|
||||
|
||||
// Tags. Must be < 0.
|
||||
const (
|
||||
// Objects
|
||||
packageTag = -(iota + 1)
|
||||
constTag
|
||||
typeTag
|
||||
varTag
|
||||
funcTag
|
||||
endTag
|
||||
|
||||
// Types
|
||||
namedTag
|
||||
arrayTag
|
||||
sliceTag
|
||||
dddTag
|
||||
structTag
|
||||
pointerTag
|
||||
signatureTag
|
||||
interfaceTag
|
||||
mapTag
|
||||
chanTag
|
||||
|
||||
// Values
|
||||
falseTag
|
||||
trueTag
|
||||
int64Tag
|
||||
floatTag
|
||||
fractionTag // not used by gc
|
||||
complexTag
|
||||
stringTag
|
||||
unknownTag // not used by gc (only appears in packages with errors)
|
||||
)
|
||||
|
||||
var predeclared = []ast.Expr{
|
||||
// basic types
|
||||
ast.NewIdent("bool"),
|
||||
ast.NewIdent("int"),
|
||||
ast.NewIdent("int8"),
|
||||
ast.NewIdent("int16"),
|
||||
ast.NewIdent("int32"),
|
||||
ast.NewIdent("int64"),
|
||||
ast.NewIdent("uint"),
|
||||
ast.NewIdent("uint8"),
|
||||
ast.NewIdent("uint16"),
|
||||
ast.NewIdent("uint32"),
|
||||
ast.NewIdent("uint64"),
|
||||
ast.NewIdent("uintptr"),
|
||||
ast.NewIdent("float32"),
|
||||
ast.NewIdent("float64"),
|
||||
ast.NewIdent("complex64"),
|
||||
ast.NewIdent("complex128"),
|
||||
ast.NewIdent("string"),
|
||||
|
||||
// aliases
|
||||
ast.NewIdent("byte"),
|
||||
ast.NewIdent("rune"),
|
||||
|
||||
// error
|
||||
ast.NewIdent("error"),
|
||||
|
||||
// TODO(nsf): don't think those are used in just package type info,
|
||||
// maybe for consts, but we are not interested in that
|
||||
// untyped types
|
||||
ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedBool],
|
||||
ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedInt],
|
||||
ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedRune],
|
||||
ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedFloat],
|
||||
ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedComplex],
|
||||
ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedString],
|
||||
ast.NewIdent(">_<"), // TODO: types.Typ[types.UntypedNil],
|
||||
|
||||
// package unsafe
|
||||
&ast.SelectorExpr{X: ast.NewIdent("unsafe"), Sel: ast.NewIdent("Pointer")},
|
||||
|
||||
// invalid type
|
||||
ast.NewIdent(">_<"), // TODO: types.Typ[types.Invalid], // only appears in packages with errors
|
||||
|
||||
// used internally by gc; never used by this package or in .a files
|
||||
ast.NewIdent("any"),
|
||||
}
|
|
@ -0,0 +1,678 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strconv"
|
||||
"text/scanner"
|
||||
)
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// gc_parser
|
||||
//
|
||||
// The following part of the code may contain portions of the code from the Go
|
||||
// standard library, which tells me to retain their copyright notice:
|
||||
//
|
||||
// Copyright (c) 2009 The Go Authors. All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// gc_parser parses the textual gc export-data format token by token.
type gc_parser struct {
	scanner      scanner.Scanner
	tok          rune   // current token
	lit          string // literal text of tok (idents/ints/strings only)
	path_to_name map[string]string
	beautify     bool
	pfc          *package_file_cache
}

// init prepares the scanner over data and primes the first token.
func (p *gc_parser) init(data []byte, pfc *package_file_cache) {
	p.scanner.Init(bytes.NewReader(data))
	p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
	p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanStrings |
		scanner.ScanComments | scanner.ScanChars | scanner.SkipComments
	// note: '\n' is intentionally NOT treated as whitespace — newlines
	// terminate declarations in the export format
	p.scanner.Whitespace = 1<<'\t' | 1<<' ' | 1<<'\r' | 1<<'\v' | 1<<'\f'
	p.scanner.Filename = "package.go"
	p.next()
	// and the built-in "unsafe" package to the path_to_name map
	p.path_to_name = map[string]string{"unsafe": "unsafe"}
	p.pfc = pfc
}

// next advances to the next token, caching its literal text for token
// kinds that carry one.
func (p *gc_parser) next() {
	p.tok = p.scanner.Scan()
	switch p.tok {
	case scanner.Ident, scanner.Int, scanner.String:
		p.lit = p.scanner.TokenText()
	default:
		p.lit = ""
	}
}

// error aborts the parse via panic — presumably recovered by the code
// that drives parsing; confirm at the call site.
func (p *gc_parser) error(msg string) {
	panic(errors.New(msg))
}

// errorf is error with fmt.Sprintf formatting.
func (p *gc_parser) errorf(format string, args ...interface{}) {
	p.error(fmt.Sprintf(format, args...))
}

// expect consumes the current token if it equals tok and returns its
// literal text; otherwise it aborts the parse.
func (p *gc_parser) expect(tok rune) string {
	lit := p.lit
	if p.tok != tok {
		p.errorf("expected %s, got %s (%q)", scanner.TokenString(tok),
			scanner.TokenString(p.tok), lit)
	}
	p.next()
	return lit
}

// expect_keyword consumes an identifier and checks it is exactly keyword.
func (p *gc_parser) expect_keyword(keyword string) {
	lit := p.expect(scanner.Ident)
	if lit != keyword {
		p.errorf("expected keyword: %s, got: %q", keyword, lit)
	}
}

// expect_special consumes a multi-character token such as "..." or "<-"
// character by character; the Peek check requires the characters to be
// adjacent in the input (no whitespace between them).
func (p *gc_parser) expect_special(what string) {
	i := 0
	for i < len(what) {
		if p.tok != rune(what[i]) {
			break
		}

		nc := p.scanner.Peek()
		if i != len(what)-1 && nc <= ' ' {
			break
		}

		p.next()
		i++
	}

	if i < len(what) {
		p.errorf("expected: %q, got: %q", what, what[0:i])
	}
}

// dotIdentifier = "?" | ( ident | '·' ) { ident | int | '·' } .
// we're doing lexer job here, kind of
func (p *gc_parser) parse_dot_ident() string {
	if p.tok == '?' {
		p.next()
		return "?"
	}

	ident := ""
	sep := 'x'
	i, j := 0, -1 // i: running length, j: position of the last '·'
	for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
		ident += p.lit
		if p.tok == '·' {
			ident += "·"
			j = i
			i++
		}
		i += len(p.lit)
		sep = p.scanner.Peek()
		p.next()
	}
	// middot = \xc2\xb7
	// strip a compiler-generated numeric suffix following the final '·'
	// — NOTE(review): the ident[j+2] arithmetic assumes the middot is
	// the only multi-byte rune involved; confirm for nested middots.
	if j != -1 && i > j+1 {
		c := ident[j+2]
		if c >= '0' && c <= '9' {
			ident = ident[0:j]
		}
	}
	return ident
}
|
||||
|
||||
// ImportPath = string_lit .
// quoted name of the path, but we return it as an identifier, taking an alias
// from 'pathToAlias' map, it is filled by import statements
func (p *gc_parser) parse_package() *ast.Ident {
	path, err := strconv.Unquote(p.expect(scanner.String))
	if err != nil {
		panic(err)
	}

	return ast.NewIdent(path)
}

// ExportedName = "@" ImportPath "." dotIdentifier .
func (p *gc_parser) parse_exported_name() *ast.SelectorExpr {
	p.expect('@')
	pkg := p.parse_package()
	if pkg.Name == "" {
		// an empty path refers to the package being parsed itself
		pkg.Name = "#" + p.pfc.defalias
	} else {
		pkg.Name = p.path_to_name[pkg.Name]
	}
	p.expect('.')
	name := ast.NewIdent(p.parse_dot_ident())
	return &ast.SelectorExpr{X: pkg, Sel: name}
}

// Name = identifier | "?" | ExportedName .
// Returns the plain name together with an expression form of it.
func (p *gc_parser) parse_name() (string, ast.Expr) {
	switch p.tok {
	case scanner.Ident:
		name := p.lit
		p.next()
		return name, ast.NewIdent(name)
	case '?':
		// anonymous
		p.next()
		return "?", ast.NewIdent("?")
	case '@':
		// exported name prefixed with a package path
		en := p.parse_exported_name()
		return en.Sel.Name, en
	}
	p.error("name expected")
	return "", nil
}

// Field = Name Type [ string_lit ] .
func (p *gc_parser) parse_field() *ast.Field {
	var tag string
	name, _ := p.parse_name()
	typ := p.parse_type()
	if p.tok == scanner.String {
		tag = p.expect(scanner.String)
	}

	// "?" marks an anonymous (embedded) field: no Names entry
	var names []*ast.Ident
	if name != "?" {
		names = []*ast.Ident{ast.NewIdent(name)}
	}

	return &ast.Field{
		Names: names,
		Type:  typ,
		Tag:   &ast.BasicLit{Kind: token.STRING, Value: tag},
	}
}

// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
func (p *gc_parser) parse_parameter() *ast.Field {
	// name
	name, _ := p.parse_name()

	// type
	var typ ast.Expr
	if p.tok == '.' {
		p.expect_special("...")
		typ = &ast.Ellipsis{Elt: p.parse_type()}
	} else {
		typ = p.parse_type()
	}

	var tag string
	if p.tok == scanner.String {
		tag = p.expect(scanner.String)
	}

	return &ast.Field{
		Names: []*ast.Ident{ast.NewIdent(name)},
		Type:  typ,
		Tag:   &ast.BasicLit{Kind: token.STRING, Value: tag},
	}
}

// Parameters = "(" [ ParameterList ] ")" .
// ParameterList = { Parameter "," } Parameter .
func (p *gc_parser) parse_parameters() *ast.FieldList {
	flds := []*ast.Field{}
	parse_parameter := func() {
		par := p.parse_parameter()
		flds = append(flds, par)
	}

	p.expect('(')
	if p.tok != ')' {
		parse_parameter()
		for p.tok == ',' {
			p.next()
			parse_parameter()
		}
	}
	p.expect(')')
	return &ast.FieldList{List: flds}
}

// Signature = Parameters [ Result ] .
// Result = Type | Parameters .
func (p *gc_parser) parse_signature() *ast.FuncType {
	var params *ast.FieldList
	var results *ast.FieldList

	params = p.parse_parameters()
	switch p.tok {
	case scanner.Ident, '[', '*', '<', '@':
		// single unparenthesized result type
		fld := &ast.Field{Type: p.parse_type()}
		results = &ast.FieldList{List: []*ast.Field{fld}}
	case '(':
		results = p.parse_parameters()
	}
	return &ast.FuncType{Params: params, Results: results}
}

// MethodOrEmbedSpec = Name [ Signature ] .
func (p *gc_parser) parse_method_or_embed_spec() *ast.Field {
	name, nameexpr := p.parse_name()
	if p.tok == '(' {
		// method
		typ := p.parse_signature()
		return &ast.Field{
			Names: []*ast.Ident{ast.NewIdent(name)},
			Type:  typ,
		}
	}

	// embedded interface
	return &ast.Field{
		Type: nameexpr,
	}
}

// int_lit = [ "-" | "+" ] { "0" ... "9" } .
// parse_int consumes (and discards) an optionally signed integer.
func (p *gc_parser) parse_int() {
	switch p.tok {
	case '-', '+':
		p.next()
	}
	p.expect(scanner.Int)
}

// number = int_lit [ "p" int_lit ] .
// parse_number consumes (and discards) a number, with an optional
// binary exponent written as "p" int_lit.
func (p *gc_parser) parse_number() {
	p.parse_int()
	if p.lit == "p" {
		p.next()
		p.parse_int()
	}
}
|
||||
|
||||
//-------------------------------------------------------------------------------
// gc_parser.types
//-------------------------------------------------------------------------------

// InterfaceType = "interface" "{" [ MethodOrEmbedList ] "}" .
// MethodOrEmbedList = MethodOrEmbedSpec { ";" MethodOrEmbedSpec } .
func (p *gc_parser) parse_interface_type() ast.Expr {
	var methods []*ast.Field
	parse_method := func() {
		meth := p.parse_method_or_embed_spec()
		methods = append(methods, meth)
	}

	p.expect_keyword("interface")
	p.expect('{')
	if p.tok != '}' {
		parse_method()
		for p.tok == ';' {
			p.next()
			parse_method()
		}
	}
	p.expect('}')
	return &ast.InterfaceType{Methods: &ast.FieldList{List: methods}}
}

// StructType = "struct" "{" [ FieldList ] "}" .
// FieldList = Field { ";" Field } .
func (p *gc_parser) parse_struct_type() ast.Expr {
	var fields []*ast.Field
	parse_field := func() {
		fld := p.parse_field()
		fields = append(fields, fld)
	}

	p.expect_keyword("struct")
	p.expect('{')
	if p.tok != '}' {
		parse_field()
		for p.tok == ';' {
			p.next()
			parse_field()
		}
	}
	p.expect('}')
	return &ast.StructType{Fields: &ast.FieldList{List: fields}}
}

// MapType = "map" "[" Type "]" Type .
func (p *gc_parser) parse_map_type() ast.Expr {
	p.expect_keyword("map")
	p.expect('[')
	key := p.parse_type()
	p.expect(']')
	elt := p.parse_type()
	return &ast.MapType{Key: key, Value: elt}
}

// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
func (p *gc_parser) parse_chan_type() ast.Expr {
	dir := ast.SEND | ast.RECV // bidirectional unless narrowed below
	if p.tok == scanner.Ident {
		p.expect_keyword("chan")
		if p.tok == '<' {
			p.expect_special("<-")
			dir = ast.SEND
		}
	} else {
		p.expect_special("<-")
		p.expect_keyword("chan")
		dir = ast.RECV
	}

	elt := p.parse_type()
	return &ast.ChanType{Dir: dir, Value: elt}
}

// ArrayOrSliceType = ArrayType | SliceType .
// ArrayType = "[" int_lit "]" Type .
// SliceType = "[" "]" Type .
func (p *gc_parser) parse_array_or_slice_type() ast.Expr {
	p.expect('[')
	if p.tok == ']' {
		// SliceType
		p.next() // skip ']'
		return &ast.ArrayType{Len: nil, Elt: p.parse_type()}
	}

	// ArrayType
	lit := p.expect(scanner.Int)
	p.expect(']')
	return &ast.ArrayType{
		Len: &ast.BasicLit{Kind: token.INT, Value: lit},
		Elt: p.parse_type(),
	}
}

// Type =
//	BasicType | TypeName | ArrayType | SliceType | StructType |
//	PointerType | FuncType | InterfaceType | MapType | ChanType |
//	"(" Type ")" .
// BasicType = ident .
// TypeName = ExportedName .
// SliceType = "[" "]" Type .
// PointerType = "*" Type .
// FuncType = "func" Signature .
func (p *gc_parser) parse_type() ast.Expr {
	switch p.tok {
	case scanner.Ident:
		switch p.lit {
		case "struct":
			return p.parse_struct_type()
		case "func":
			p.next()
			return p.parse_signature()
		case "interface":
			return p.parse_interface_type()
		case "map":
			return p.parse_map_type()
		case "chan":
			return p.parse_chan_type()
		default:
			// a plain basic-type identifier
			lit := p.lit
			p.next()
			return ast.NewIdent(lit)
		}
	case '@':
		return p.parse_exported_name()
	case '[':
		return p.parse_array_or_slice_type()
	case '*':
		p.next()
		return &ast.StarExpr{X: p.parse_type()}
	case '<':
		return p.parse_chan_type()
	case '(':
		p.next()
		typ := p.parse_type()
		p.expect(')')
		return typ
	}
	p.errorf("unexpected token: %s", scanner.TokenString(p.tok))
	return nil
}
|
||||
|
||||
//-------------------------------------------------------------------------------
// gc_parser.declarations
//-------------------------------------------------------------------------------

// ImportDecl = "import" identifier string_lit .
func (p *gc_parser) parse_import_decl() {
	p.expect_keyword("import")
	alias := p.expect(scanner.Ident)
	path := p.parse_package()
	// "!path!alias" is the internal unique name for the imported package
	fullName := "!" + path.Name + "!" + alias
	p.path_to_name[path.Name] = fullName
	p.pfc.add_package_to_scope(fullName, path.Name)
}

// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
// Literal = bool_lit | int_lit | float_lit | complex_lit | string_lit .
// bool_lit = "true" | "false" .
// complex_lit = "(" float_lit "+" float_lit ")" .
// rune_lit = "(" int_lit "+" int_lit ")" .
// string_lit = `"` { unicode_char } `"` .
// The literal is consumed but discarded; a placeholder value 0 is stored.
func (p *gc_parser) parse_const_decl() (string, *ast.GenDecl) {
	// TODO: do we really need actual const value? gocode doesn't use this
	p.expect_keyword("const")
	name := p.parse_exported_name()

	var typ ast.Expr
	if p.tok != '=' {
		typ = p.parse_type()
	}

	p.expect('=')

	// skip the value
	switch p.tok {
	case scanner.Ident:
		// must be bool, true or false
		p.next()
	case '-', '+', scanner.Int:
		// number
		p.parse_number()
	case '(':
		// complex_lit or rune_lit
		p.next() // skip '('
		if p.tok == scanner.Char {
			p.next()
		} else {
			p.parse_number()
		}
		p.expect('+')
		p.parse_number()
		p.expect(')')
	case scanner.Char:
		p.next()
	case scanner.String:
		p.next()
	default:
		p.error("expected literal")
	}

	return name.X.(*ast.Ident).Name, &ast.GenDecl{
		Tok: token.CONST,
		Specs: []ast.Spec{
			&ast.ValueSpec{
				Names:  []*ast.Ident{name.Sel},
				Type:   typ,
				Values: []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}},
			},
		},
	}
}

// TypeDecl = "type" ExportedName Type .
// Returns the owning package name and the type declaration.
func (p *gc_parser) parse_type_decl() (string, *ast.GenDecl) {
	p.expect_keyword("type")
	name := p.parse_exported_name()
	typ := p.parse_type()
	return name.X.(*ast.Ident).Name, &ast.GenDecl{
		Tok: token.TYPE,
		Specs: []ast.Spec{
			&ast.TypeSpec{
				Name: name.Sel,
				Type: typ,
			},
		},
	}
}

// VarDecl = "var" ExportedName Type .
// Returns the owning package name and the var declaration.
func (p *gc_parser) parse_var_decl() (string, *ast.GenDecl) {
	p.expect_keyword("var")
	name := p.parse_exported_name()
	typ := p.parse_type()
	return name.X.(*ast.Ident).Name, &ast.GenDecl{
		Tok: token.VAR,
		Specs: []ast.Spec{
			&ast.ValueSpec{
				Names: []*ast.Ident{name.Sel},
				Type:  typ,
			},
		},
	}
}
|
||||
|
||||
// FuncBody = "{" ... "}" .
|
||||
func (p *gc_parser) parse_func_body() {
|
||||
p.expect('{')
|
||||
for i := 1; i > 0; p.next() {
|
||||
switch p.tok {
|
||||
case '{':
|
||||
i++
|
||||
case '}':
|
||||
i--
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FuncDecl = "func" ExportedName Signature [ FuncBody ] .
// Returns the owning package name and the function declaration; any
// inlined body is skipped and discarded.
func (p *gc_parser) parse_func_decl() (string, *ast.FuncDecl) {
	// "func" was already consumed by lookahead
	name := p.parse_exported_name()
	typ := p.parse_signature()
	if p.tok == '{' {
		p.parse_func_body()
	}
	return name.X.(*ast.Ident).Name, &ast.FuncDecl{
		Name: name.Sel,
		Type: typ,
	}
}
|
||||
|
||||
func strip_method_receiver(recv *ast.FieldList) string {
|
||||
var sel *ast.SelectorExpr
|
||||
|
||||
// find selector expression
|
||||
typ := recv.List[0].Type
|
||||
switch t := typ.(type) {
|
||||
case *ast.StarExpr:
|
||||
sel = t.X.(*ast.SelectorExpr)
|
||||
case *ast.SelectorExpr:
|
||||
sel = t
|
||||
}
|
||||
|
||||
// extract package path
|
||||
pkg := sel.X.(*ast.Ident).Name
|
||||
|
||||
// write back stripped type
|
||||
switch t := typ.(type) {
|
||||
case *ast.StarExpr:
|
||||
t.X = sel.Sel
|
||||
case *ast.SelectorExpr:
|
||||
recv.List[0].Type = sel.Sel
|
||||
}
|
||||
|
||||
return pkg
|
||||
}
|
||||
|
||||
// MethodDecl = "func" Receiver Name Signature .
// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" [ FuncBody ] .
// Returns the receiver's package name and the method declaration with
// the package qualifier stripped from the receiver type.
func (p *gc_parser) parse_method_decl() (string, *ast.FuncDecl) {
	recv := p.parse_parameters()
	pkg := strip_method_receiver(recv)
	name, _ := p.parse_name()
	typ := p.parse_signature()
	if p.tok == '{' {
		p.parse_func_body()
	}
	return pkg, &ast.FuncDecl{
		Recv: recv,
		Name: ast.NewIdent(name),
		Type: typ,
	}
}

// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
// Dispatches on the leading keyword; import declarations produce no
// ast.Decl, so both results stay zero for them.
func (p *gc_parser) parse_decl() (pkg string, decl ast.Decl) {
	switch p.lit {
	case "import":
		p.parse_import_decl()
	case "const":
		pkg, decl = p.parse_const_decl()
	case "type":
		pkg, decl = p.parse_type_decl()
	case "var":
		pkg, decl = p.parse_var_decl()
	case "func":
		// lookahead: '(' after "func" means a method receiver follows
		p.next()
		if p.tok == '(' {
			pkg, decl = p.parse_method_decl()
		} else {
			pkg, decl = p.parse_func_decl()
		}
	}
	p.expect('\n')
	return
}

// Export = PackageClause { Decl } "$$" .
// PackageClause = "package" identifier [ "safe" ] "\n" .
// parse_export drives the whole parse, invoking callback once per
// parsed declaration with its owning package name.
func (p *gc_parser) parse_export(callback func(string, ast.Decl)) {
	p.expect_keyword("package")
	p.pfc.defalias = p.expect(scanner.Ident)
	if p.tok != '\n' {
		p.expect_keyword("safe")
	}
	p.expect('\n')

	// '$' starts the "$$" terminator of the export section
	for p.tok != '$' && p.tok != scanner.EOF {
		pkg, decl := p.parse_decl()
		if decl != nil {
			callback(pkg, decl)
		}
	}
}
|
|
@ -0,0 +1,7 @@
|
|||
// +build !go1.7
|
||||
|
||||
package main
|
||||
|
||||
// init registers the pre-go1.7 import path of the context package
// (this file is built only for Go < 1.7, per the build tag above).
func init() {
	knownPackageIdents["context"] = "golang.org/x/net/context"
}
|
|
@ -0,0 +1,141 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"go/scanner"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// All the code in this file serves single purpose:
// It separates a function with the cursor inside and the rest of the code. I'm
// doing that, because sometimes parser is not able to recover itself from an
// error and the autocompletion results become less complete.

// tok_pos_pair is one scanned token together with its position.
type tok_pos_pair struct {
	tok token.Token
	pos token.Pos
}

// tok_collection accumulates every token of a file so brace nesting
// can be analyzed after scanning.
type tok_collection struct {
	tokens []tok_pos_pair
	fset   *token.FileSet
}

// next scans one token, appends it, and reports false at EOF.
func (this *tok_collection) next(s *scanner.Scanner) bool {
	pos, tok, _ := s.Scan()
	if tok == token.EOF {
		return false
	}

	this.tokens = append(this.tokens, tok_pos_pair{tok, pos})
	return true
}

// find_decl_beg walks backwards from token index pos looking for the
// byte offset where the enclosing declaration begins. Scanning right to
// left, '}' increases depth and '{' decreases it, so the minimum depth
// marks the outermost unmatched '{'. A second backward pass then moves
// the start to the ';' at that same depth, so the span begins before
// the declaration header. Returns -1 when no open scope is found.
func (this *tok_collection) find_decl_beg(pos int) int {
	lowest := 0
	lowpos := -1
	lowi := -1
	cur := 0
	for i := pos; i >= 0; i-- {
		t := this.tokens[i]
		switch t.tok {
		case token.RBRACE:
			cur++
		case token.LBRACE:
			cur--
		}

		if cur < lowest {
			lowest = cur
			lowpos = this.fset.Position(t.pos).Offset
			lowi = i
		}
	}

	cur = lowest
	for i := lowi - 1; i >= 0; i-- {
		t := this.tokens[i]
		switch t.tok {
		case token.RBRACE:
			cur++
		case token.LBRACE:
			cur--
		}
		if t.tok == token.SEMICOLON && cur == lowest {
			lowpos = this.fset.Position(t.pos).Offset
			break
		}
	}

	return lowpos
}

// find_decl_end walks forward from token index pos for the byte offset
// of the '}' closing the outermost open scope. Returns -1 when none.
func (this *tok_collection) find_decl_end(pos int) int {
	highest := 0
	highpos := -1
	cur := 0

	// skip a leading '{' so it does not cancel out its own closing
	// brace — NOTE(review): confirm this is the intent
	if this.tokens[pos].tok == token.LBRACE {
		pos++
	}

	for i := pos; i < len(this.tokens); i++ {
		t := this.tokens[i]
		switch t.tok {
		case token.RBRACE:
			cur++
		case token.LBRACE:
			cur--
		}

		if cur > highest {
			highest = cur
			highpos = this.fset.Position(t.pos).Offset
		}
	}

	return highpos
}

// find_outermost_scope locates the last token starting before the
// cursor offset and returns the (begin, end) byte offsets of the
// declaration enclosing it; either may be -1.
func (this *tok_collection) find_outermost_scope(cursor int) (int, int) {
	pos := 0

	for i, t := range this.tokens {
		if cursor <= this.fset.Position(t.pos).Offset {
			break
		}
		pos = i
	}

	return this.find_decl_beg(pos), this.find_decl_end(pos)
}

// return new cursor position, file without ripped part and the ripped part itself
// variants:
// new-cursor, file-without-ripped-part, ripped-part
// old-cursor, file, nil
func (this *tok_collection) rip_off_decl(file []byte, cursor int) (int, []byte, []byte) {
	this.fset = token.NewFileSet()
	var s scanner.Scanner
	s.Init(this.fset.AddFile("", this.fset.Base(), len(file)), file, nil, scanner.ScanComments)
	for this.next(&s) {
	}

	beg, end := this.find_outermost_scope(cursor)
	if beg == -1 || end == -1 {
		// cursor is not inside any scope; keep the file intact
		return cursor, file, nil
	}

	// ripped part is file[beg..end], inclusive
	ripped := make([]byte, end+1-beg)
	copy(ripped, file[beg:end+1])

	// remainder: everything before beg plus everything after end
	newfile := make([]byte, len(file)-len(ripped))
	copy(newfile, file[:beg])
	copy(newfile[beg:], file[end+1:])

	return cursor - beg, newfile, ripped
}
|
||||
|
||||
func rip_off_decl(file []byte, cursor int) (int, []byte, []byte) {
|
||||
var tc tok_collection
|
||||
return tc.rip_off_decl(file, cursor)
|
||||
}
|
|
@ -0,0 +1,138 @@
|
|||
// WARNING! Autogenerated by goremote, don't touch.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"net/rpc"
|
||||
)
|
||||
|
||||
// RPC is the receiver registered with net/rpc; every method below is a
// goremote-generated wrapper pairing a server_* function with a
// client_* stub (see the "Autogenerated" warning atop this file —
// regenerate rather than hand-edit).
type RPC struct {
}

// wrapper for: server_auto_complete

type Args_auto_complete struct {
	Arg0 []byte
	Arg1 string
	Arg2 int
	Arg3 go_build_context
}
type Reply_auto_complete struct {
	Arg0 []candidate
	Arg1 int
}

func (r *RPC) RPC_auto_complete(args *Args_auto_complete, reply *Reply_auto_complete) error {
	reply.Arg0, reply.Arg1 = server_auto_complete(args.Arg0, args.Arg1, args.Arg2, args.Arg3)
	return nil
}
func client_auto_complete(cli *rpc.Client, Arg0 []byte, Arg1 string, Arg2 int, Arg3 go_build_context) (c []candidate, d int) {
	var args Args_auto_complete
	var reply Reply_auto_complete
	args.Arg0 = Arg0
	args.Arg1 = Arg1
	args.Arg2 = Arg2
	args.Arg3 = Arg3
	err := cli.Call("RPC.RPC_auto_complete", &args, &reply)
	if err != nil {
		panic(err)
	}
	return reply.Arg0, reply.Arg1
}

// wrapper for: server_close

type Args_close struct {
	Arg0 int
}
type Reply_close struct {
	Arg0 int
}

func (r *RPC) RPC_close(args *Args_close, reply *Reply_close) error {
	reply.Arg0 = server_close(args.Arg0)
	return nil
}
func client_close(cli *rpc.Client, Arg0 int) int {
	var args Args_close
	var reply Reply_close
	args.Arg0 = Arg0
	err := cli.Call("RPC.RPC_close", &args, &reply)
	if err != nil {
		panic(err)
	}
	return reply.Arg0
}

// wrapper for: server_status

type Args_status struct {
	Arg0 int
}
type Reply_status struct {
	Arg0 string
}

func (r *RPC) RPC_status(args *Args_status, reply *Reply_status) error {
	reply.Arg0 = server_status(args.Arg0)
	return nil
}
func client_status(cli *rpc.Client, Arg0 int) string {
	var args Args_status
	var reply Reply_status
	args.Arg0 = Arg0
	err := cli.Call("RPC.RPC_status", &args, &reply)
	if err != nil {
		panic(err)
	}
	return reply.Arg0
}

// wrapper for: server_drop_cache

type Args_drop_cache struct {
	Arg0 int
}
type Reply_drop_cache struct {
	Arg0 int
}

func (r *RPC) RPC_drop_cache(args *Args_drop_cache, reply *Reply_drop_cache) error {
	reply.Arg0 = server_drop_cache(args.Arg0)
	return nil
}
func client_drop_cache(cli *rpc.Client, Arg0 int) int {
	var args Args_drop_cache
	var reply Reply_drop_cache
	args.Arg0 = Arg0
	err := cli.Call("RPC.RPC_drop_cache", &args, &reply)
	if err != nil {
		panic(err)
	}
	return reply.Arg0
}

// wrapper for: server_set

type Args_set struct {
	Arg0, Arg1 string
}
type Reply_set struct {
	Arg0 string
}

func (r *RPC) RPC_set(args *Args_set, reply *Reply_set) error {
	reply.Arg0 = server_set(args.Arg0, args.Arg1)
	return nil
}
func client_set(cli *rpc.Client, Arg0, Arg1 string) string {
	var args Args_set
	var reply Reply_set
	args.Arg0 = Arg0
	args.Arg1 = Arg1
	err := cli.Call("RPC.RPC_set", &args, &reply)
	if err != nil {
		panic(err)
	}
	return reply.Arg0
}
|
|
@ -0,0 +1,66 @@
|
|||
package main
|
||||
|
||||
//-------------------------------------------------------------------------
// scope
//-------------------------------------------------------------------------

// scope is one lexical scope: a name->declaration table plus a link to
// the enclosing scope.
type scope struct {
	parent   *scope // nil for universe scope
	entities map[string]*decl
}

// new_scope creates an empty scope nested inside outer.
func new_scope(outer *scope) *scope {
	s := new(scope)
	s.parent = outer
	s.entities = make(map[string]*decl)
	return s
}

// returns: new, prev
// advance_scope reuses s when it is still empty; otherwise it opens a
// fresh scope nested inside s.
func advance_scope(s *scope) (*scope, *scope) {
	if len(s.entities) == 0 {
		return s, s.parent
	}
	return new_scope(s), s
}

// adds declaration or returns an existing one
func (s *scope) add_named_decl(d *decl) *decl {
	return s.add_decl(d.name, d)
}
|
||||
|
||||
func (s *scope) add_decl(name string, d *decl) *decl {
|
||||
decl, ok := s.entities[name]
|
||||
if !ok {
|
||||
s.entities[name] = d
|
||||
return d
|
||||
}
|
||||
return decl
|
||||
}
|
||||
|
||||
// replace_decl unconditionally (re)binds name to d in this scope.
func (s *scope) replace_decl(name string, d *decl) {
	s.entities[name] = d
}

// merge_decl combines d into the scope: inserted as-is when the name is
// new, otherwise the stored decl is deep-copied and then expanded with
// d — presumably so a decl shared with other scopes is never mutated;
// confirm against decl.deep_copy/expand_or_replace.
func (s *scope) merge_decl(d *decl) {
	decl, ok := s.entities[d.name]
	if !ok {
		s.entities[d.name] = d
	} else {
		// the inner 'decl' deliberately shadows the looked-up one
		decl := decl.deep_copy()
		decl.expand_or_replace(d)
		s.entities[d.name] = decl
	}
}
|
||||
|
||||
func (s *scope) lookup(name string) *decl {
|
||||
decl, ok := s.entities[name]
|
||||
if !ok {
|
||||
if s.parent != nil {
|
||||
return s.parent.lookup(name)
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return decl
|
||||
}
|
|
@ -0,0 +1,237 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/build"
|
||||
"log"
|
||||
"net"
|
||||
"net/rpc"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"runtime"
|
||||
"time"
|
||||
)
|
||||
|
||||
// do_server runs the daemon: reads configuration, binds the RPC
// listener (tcp or unix socket), and serves until shutdown.
// Returns a process exit code.
func do_server() int {
	g_config.read()
	if g_config.ForceDebugOutput != "" {
		// forcefully enable debugging and redirect logging into the
		// specified file
		*g_debug = true
		f, err := os.Create(g_config.ForceDebugOutput)
		if err != nil {
			panic(err)
		}
		log.SetOutput(f)
	}

	addr := *g_addr
	if *g_sock == "unix" {
		addr = get_socket_filename()
		// NOTE(review): exists-check followed by listen is racy — another
		// daemon could create the socket in between; confirm acceptable.
		if file_exists(addr) {
			log.Printf("unix socket: '%s' already exists\n", addr)
			return 1
		}
	}
	g_daemon = new_daemon(*g_sock, addr)
	if *g_sock == "unix" {
		// cleanup unix socket file
		defer os.Remove(addr)
	}

	rpc.Register(new(RPC))

	g_daemon.loop()
	return 0
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// daemon
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
type daemon struct {
|
||||
listener net.Listener
|
||||
cmd_in chan int
|
||||
autocomplete *auto_complete_context
|
||||
pkgcache package_cache
|
||||
declcache *decl_cache
|
||||
context package_lookup_context
|
||||
}
|
||||
|
||||
func new_daemon(network, address string) *daemon {
|
||||
var err error
|
||||
|
||||
d := new(daemon)
|
||||
d.listener, err = net.Listen(network, address)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
d.cmd_in = make(chan int, 1)
|
||||
d.pkgcache = new_package_cache()
|
||||
d.declcache = new_decl_cache(&d.context)
|
||||
d.autocomplete = new_auto_complete_context(d.pkgcache, d.declcache)
|
||||
return d
|
||||
}
|
||||
|
||||
// drop_cache resets the package and declaration caches and rebuilds the
// autocompletion context on top of the fresh caches. It is invoked when the
// build context or a config option changes, and after a recovered panic
// (see server_auto_complete).
func (this *daemon) drop_cache() {
	this.pkgcache = new_package_cache()
	this.declcache = new_decl_cache(&this.context)
	this.autocomplete = new_auto_complete_context(this.pkgcache, this.declcache)
}
|
||||
|
||||
// Control commands understood by daemon.loop, sent over daemon.cmd_in.
const (
	daemon_close = iota // ask the daemon loop to exit
)
|
||||
|
||||
// loop serves RPC connections one at a time until either a close command
// arrives on cmd_in or no request has been seen for g_config.CloseTimeout
// seconds (automatic shutdown of an idle daemon).
func (this *daemon) loop() {
	conn_in := make(chan net.Conn)
	go func() {
		// Acceptor goroutine: forwards accepted connections to the select
		// below. A failed Accept takes the whole daemon down via panic.
		for {
			c, err := this.listener.Accept()
			if err != nil {
				panic(err)
			}
			conn_in <- c
		}
	}()

	timeout := time.Duration(g_config.CloseTimeout) * time.Second
	countdown := time.NewTimer(timeout)

	for {
		// handle connections or server CMDs (currently one CMD)
		select {
		case c := <-conn_in:
			// Requests are served synchronously, so the idle countdown is
			// restarted only after the request completes.
			rpc.ServeConn(c)
			countdown.Reset(timeout)
			// NOTE(review): Reset without draining countdown.C can leave a
			// stale expiry pending if the timer fired during ServeConn —
			// confirm this early-shutdown race is acceptable here.
			runtime.GC()
		case cmd := <-this.cmd_in:
			switch cmd {
			case daemon_close:
				return
			}
		case <-countdown.C:
			// idle for too long — shut down
			return
		}
	}
}
|
||||
|
||||
// close asks the daemon loop to terminate. It blocks until loop picks the
// command up (cmd_in has capacity 1).
func (this *daemon) close() {
	this.cmd_in <- daemon_close
}

// g_daemon is the process-wide daemon instance, created by do_server.
var g_daemon *daemon
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// server_* functions
|
||||
//
|
||||
// Corresponding client_* functions are autogenerated by goremote.
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// server_auto_complete is the RPC entry point for completion requests. It
// synchronizes the daemon's build context with the client's, resolves the
// project root according to the configured package lookup mode, and asks
// the autocompletion context for candidates at the byte offset `cursor`
// inside `file`. On a panic anywhere below, the deferred recover returns a
// single sentinel "PANIC" candidate and drops all caches.
func server_auto_complete(file []byte, filename string, cursor int, context_packed go_build_context) (c []candidate, d int) {
	context := unpack_build_context(&context_packed)
	defer func() {
		if err := recover(); err != nil {
			print_backtrace(err)
			// Named results let the recover path substitute a sentinel
			// answer instead of crashing the RPC server.
			c = []candidate{
				{"PANIC", "PANIC", decl_invalid},
			}

			// drop cache
			g_daemon.drop_cache()
		}
	}()
	// TODO: Probably we don't care about comparing all the fields, checking GOROOT and GOPATH
	// should be enough.
	if !reflect.DeepEqual(g_daemon.context.Context, context.Context) {
		// Client build context changed: adopt it and invalidate caches.
		g_daemon.context = context
		g_daemon.drop_cache()
	}
	switch g_config.PackageLookupMode {
	case "bzl":
		// when package lookup mode is bzl, we set GOPATH to "" explicitly and
		// BzlProjectRoot becomes valid (or empty)
		var err error
		g_daemon.context.GOPATH = ""
		g_daemon.context.BzlProjectRoot, err = find_bzl_project_root(g_config.LibPath, filename)
		if *g_debug && err != nil {
			log.Printf("Bzl project root not found: %s", err)
		}
	case "gb":
		// when package lookup mode is gb, we set GOPATH to "" explicitly and
		// GBProjectRoot becomes valid (or empty)
		var err error
		g_daemon.context.GOPATH = ""
		g_daemon.context.GBProjectRoot, err = find_gb_project_root(filename)
		if *g_debug && err != nil {
			log.Printf("Gb project root not found: %s", err)
		}
	case "go":
		// get current package path for GO15VENDOREXPERIMENT hack
		g_daemon.context.CurrentPackagePath = ""
		pkg, err := g_daemon.context.ImportDir(filepath.Dir(filename), build.FindOnly)
		if err == nil {
			if *g_debug {
				log.Printf("Go project path: %s", pkg.ImportPath)
			}
			g_daemon.context.CurrentPackagePath = pkg.ImportPath
		} else if *g_debug {
			log.Printf("Go project path not found: %s", err)
		}
	}
	if *g_debug {
		// Debug dump: the request's file contents with a '#' marking the
		// cursor position.
		var buf bytes.Buffer
		log.Printf("Got autocompletion request for '%s'\n", filename)
		log.Printf("Cursor at: %d\n", cursor)
		buf.WriteString("-------------------------------------------------------\n")
		buf.Write(file[:cursor])
		buf.WriteString("#")
		buf.Write(file[cursor:])
		log.Print(buf.String())
		log.Println("-------------------------------------------------------")
	}
	candidates, d := g_daemon.autocomplete.apropos(file, filename, cursor)
	if *g_debug {
		log.Printf("Offset: %d\n", d)
		log.Printf("Number of candidates found: %d\n", len(candidates))
		log.Printf("Candidates are:\n")
		for _, c := range candidates {
			abbr := fmt.Sprintf("%s %s %s", c.Class, c.Name, c.Type)
			if c.Class == decl_func {
				abbr = fmt.Sprintf("%s %s%s", c.Class, c.Name, c.Type[len("func"):])
			}
			log.Printf("  %s\n", abbr)
		}
		log.Println("=======================================================")
	}
	return candidates, d
}
|
||||
|
||||
// server_close is the RPC entry point that shuts the daemon down. The
// argument and return value exist only to satisfy the RPC wire signature.
func server_close(notused int) int {
	g_daemon.close()
	return 0
}
|
||||
|
||||
// server_status returns a human-readable dump of the autocompletion cache
// state. The argument exists only to satisfy the RPC wire signature.
func server_status(notused int) string {
	return g_daemon.autocomplete.status()
}
|
||||
|
||||
// server_drop_cache is the RPC entry point that discards all cached package
// and declaration data on client request.
func server_drop_cache(notused int) int {
	// drop cache
	g_daemon.drop_cache()
	return 0
}
|
||||
|
||||
// server_set implements the option get/set RPC. A key of "\x00" lists all
// options, a value of "\x00" shows the single option `key`; otherwise the
// option is updated, which invalidates all caches.
func server_set(key, value string) string {
	if key == "\x00" {
		return g_config.list()
	} else if value == "\x00" {
		return g_config.list_option(key)
	}
	// drop cache on settings changes
	g_daemon.drop_cache()
	return g_config.set_option(key, value)
}
|
|
@ -0,0 +1,287 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/build"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"sync"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// our own readdir, which skips the files it cannot lstat
|
||||
func readdir_lstat(name string) ([]os.FileInfo, error) {
|
||||
f, err := os.Open(name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
names, err := f.Readdirnames(-1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
out := make([]os.FileInfo, 0, len(names))
|
||||
for _, lname := range names {
|
||||
s, err := os.Lstat(filepath.Join(name, lname))
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
out = append(out, s)
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// our other readdir function, only opens and reads
|
||||
func readdir(dirname string) []os.FileInfo {
|
||||
f, err := os.Open(dirname)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
fi, err := f.Readdir(-1)
|
||||
f.Close()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return fi
|
||||
}
|
||||
|
||||
// returns truncated 'data' and amount of bytes skipped (for cursor pos adjustment)
|
||||
func filter_out_shebang(data []byte) ([]byte, int) {
|
||||
if len(data) > 2 && data[0] == '#' && data[1] == '!' {
|
||||
newline := bytes.Index(data, []byte("\n"))
|
||||
if newline != -1 && len(data) > newline+1 {
|
||||
return data[newline+1:], newline + 1
|
||||
}
|
||||
}
|
||||
return data, 0
|
||||
}
|
||||
|
||||
// file_exists reports whether filename can be stat'ed. Any Stat error
// (not just "not exists") counts as non-existence.
func file_exists(filename string) bool {
	_, err := os.Stat(filename)
	return err == nil
}
|
||||
|
||||
// is_dir reports whether path exists and is a directory.
func is_dir(path string) bool {
	if fi, err := os.Stat(path); err == nil {
		return fi.IsDir()
	}
	return false
}
|
||||
|
||||
// char_to_byte_offset converts a character count (counted in UTF-8 rune
// starts) into a byte offset within s, clamping at len(s).
func char_to_byte_offset(s []byte, offset_c int) (offset_b int) {
	remaining := offset_c
	for offset_b < len(s) {
		if remaining <= 0 {
			break
		}
		// only rune-start bytes consume a character from the budget
		if utf8.RuneStart(s[offset_b]) {
			remaining--
		}
		offset_b++
	}
	return offset_b
}
|
||||
|
||||
func xdg_home_dir() string {
|
||||
xdghome := os.Getenv("XDG_CONFIG_HOME")
|
||||
if xdghome == "" {
|
||||
xdghome = filepath.Join(os.Getenv("HOME"), ".config")
|
||||
}
|
||||
return xdghome
|
||||
}
|
||||
|
||||
// has_prefix reports whether s starts with prefix, optionally comparing
// case-insensitively (via simple ToLower folding, matching the original).
func has_prefix(s, prefix string, ignorecase bool) bool {
	if !ignorecase {
		return strings.HasPrefix(s, prefix)
	}
	return strings.HasPrefix(strings.ToLower(s), strings.ToLower(prefix))
}
|
||||
|
||||
// find_bzl_project_root walks upward from path's directory and returns the
// first ancestor that matches one of the colon-separated entries of
// libpath. It fails when libpath is empty or no ancestor matches.
func find_bzl_project_root(libpath, path string) (string, error) {
	if libpath == "" {
		return "", fmt.Errorf("could not find project root, libpath is empty")
	}

	roots := make(map[string]struct{})
	for _, entry := range strings.Split(libpath, ":") {
		roots[filepath.Clean(strings.TrimSpace(entry))] = struct{}{}
	}

	path = filepath.Dir(path)
	if path == "" {
		return "", fmt.Errorf("project root is blank")
	}

	start := path
	for path != "/" {
		if _, found := roots[filepath.Clean(path)]; found {
			return path, nil
		}
		path = filepath.Dir(path)
	}
	return "", fmt.Errorf("could not find project root in %q or its parents", start)
}
|
||||
|
||||
// Code taken directly from `gb`, I hope author doesn't mind.
|
||||
func find_gb_project_root(path string) (string, error) {
|
||||
path = filepath.Dir(path)
|
||||
if path == "" {
|
||||
return "", fmt.Errorf("project root is blank")
|
||||
}
|
||||
start := path
|
||||
for path != "/" {
|
||||
root := filepath.Join(path, "src")
|
||||
if _, err := os.Stat(root); err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
path = filepath.Dir(path)
|
||||
continue
|
||||
}
|
||||
return "", err
|
||||
}
|
||||
path, err := filepath.EvalSymlinks(path)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return path, nil
|
||||
}
|
||||
return "", fmt.Errorf("could not find project root in %q or its parents", start)
|
||||
}
|
||||
|
||||
// vendorlessImportPath returns the devendorized version of the provided import path.
|
||||
// e.g. "foo/bar/vendor/a/b" => "a/b"
|
||||
func vendorlessImportPath(ipath string) string {
|
||||
// Devendorize for use in import statement.
|
||||
if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
|
||||
return ipath[i+len("/vendor/"):]
|
||||
}
|
||||
if strings.HasPrefix(ipath, "vendor/") {
|
||||
return ipath[len("vendor/"):]
|
||||
}
|
||||
return ipath
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// print_backtrace
|
||||
//
|
||||
// a nicer backtrace printer than the default one
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// g_backtrace_mutex serializes print_backtrace output so traces from
// concurrent panics do not interleave.
var g_backtrace_mutex sync.Mutex

// print_backtrace prints err and the current goroutine's call stack to
// stdout in a more compact format than the runtime's default panic dump.
func print_backtrace(err interface{}) {
	g_backtrace_mutex.Lock()
	defer g_backtrace_mutex.Unlock()
	fmt.Printf("panic: %v\n", err)
	// start two frames up so print_backtrace itself and its immediate
	// caller are not part of the trace
	i := 2
	for {
		pc, file, line, ok := runtime.Caller(i)
		if !ok {
			break
		}
		f := runtime.FuncForPC(pc)
		fmt.Printf("%d(%s): %s:%d\n", i-1, f.Name(), file, line)
		i++
	}
	fmt.Println("")
}
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// File reader goroutine
|
||||
//
|
||||
// It's a bad idea to block multiple goroutines on file I/O. Creates many
|
||||
// threads which fight for HDD. Therefore only single goroutine should read HDD
|
||||
// at the same time.
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// file_read_request asks the single reader goroutine (see new_file_reader)
// for one file; the result is delivered on out.
type file_read_request struct {
	filename string
	out chan file_read_response
}

// file_read_response carries the file contents or the read error.
type file_read_response struct {
	data []byte
	error error
}

// file_reader_type funnels all file reads through one goroutine; in is the
// request channel that goroutine consumes.
type file_reader_type struct {
	in chan file_read_request
}
|
||||
|
||||
// new_file_reader starts the single reader goroutine and returns the handle
// used to submit requests. The goroutine runs for the life of the process
// (it is never stopped) and services one request at a time, so concurrent
// callers never compete for disk I/O.
func new_file_reader() *file_reader_type {
	this := new(file_reader_type)
	this.in = make(chan file_read_request)
	go func() {
		var rsp file_read_response
		for {
			req := <-this.in
			rsp.data, rsp.error = ioutil.ReadFile(req.filename)
			req.out <- rsp
		}
	}()
	return this
}
|
||||
|
||||
// read_file reads filename through the shared reader goroutine, blocking
// until the read completes. Safe to call from multiple goroutines.
func (this *file_reader_type) read_file(filename string) ([]byte, error) {
	req := file_read_request{
		filename,
		make(chan file_read_response),
	}
	this.in <- req
	rsp := <-req.out
	return rsp.data, rsp.error
}

// file_reader is the process-wide serialized file reader.
var file_reader = new_file_reader()
|
||||
|
||||
//-------------------------------------------------------------------------
|
||||
// copy of the build.Context without func fields
|
||||
//-------------------------------------------------------------------------
|
||||
|
||||
// go_build_context mirrors the plain data fields of build.Context,
// omitting its func fields — presumably so the value can be sent over
// net/rpc (see server_auto_complete's context_packed parameter); confirm
// against the client side.
type go_build_context struct {
	GOARCH string
	GOOS string
	GOROOT string
	GOPATH string
	CgoEnabled bool
	UseAllFiles bool
	Compiler string
	BuildTags []string
	ReleaseTags []string
	InstallSuffix string
}
|
||||
|
||||
func pack_build_context(ctx *build.Context) go_build_context {
|
||||
return go_build_context{
|
||||
GOARCH: ctx.GOARCH,
|
||||
GOOS: ctx.GOOS,
|
||||
GOROOT: ctx.GOROOT,
|
||||
GOPATH: ctx.GOPATH,
|
||||
CgoEnabled: ctx.CgoEnabled,
|
||||
UseAllFiles: ctx.UseAllFiles,
|
||||
Compiler: ctx.Compiler,
|
||||
BuildTags: ctx.BuildTags,
|
||||
ReleaseTags: ctx.ReleaseTags,
|
||||
InstallSuffix: ctx.InstallSuffix,
|
||||
}
|
||||
}
|
||||
|
||||
func unpack_build_context(ctx *go_build_context) package_lookup_context {
|
||||
return package_lookup_context{
|
||||
Context: build.Context{
|
||||
GOARCH: ctx.GOARCH,
|
||||
GOOS: ctx.GOOS,
|
||||
GOROOT: ctx.GOROOT,
|
||||
GOPATH: ctx.GOPATH,
|
||||
CgoEnabled: ctx.CgoEnabled,
|
||||
UseAllFiles: ctx.UseAllFiles,
|
||||
Compiler: ctx.Compiler,
|
||||
BuildTags: ctx.BuildTags,
|
||||
ReleaseTags: ctx.ReleaseTags,
|
||||
InstallSuffix: ctx.InstallSuffix,
|
||||
},
|
||||
}
|
||||
}
|
|
@ -0,0 +1,28 @@
|
|||
Copyright (c) 2015, visualfc <visualfc@gmail.com>
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of gotools nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
# gotools
|
||||
A collection of Go language tools used by LiteIDE.
|
|
@ -0,0 +1,8 @@
|
|||
// Copyright 2011-2015 visualfc <visualfc@gmail.com>. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
gotools is a collection of Go language tools used by LiteIDE.
|
||||
*/
|
||||
package main
|
|
@ -0,0 +1,41 @@
|
|||
// Copyright 2011-2015 visualfc <visualfc@gmail.com>. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/visualfc/gotools/astview"
|
||||
"github.com/visualfc/gotools/command"
|
||||
"github.com/visualfc/gotools/docview"
|
||||
"github.com/visualfc/gotools/finddoc"
|
||||
"github.com/visualfc/gotools/goapi"
|
||||
"github.com/visualfc/gotools/goimports"
|
||||
"github.com/visualfc/gotools/gopresent"
|
||||
"github.com/visualfc/gotools/jsonfmt"
|
||||
"github.com/visualfc/gotools/oracle"
|
||||
"github.com/visualfc/gotools/pkgs"
|
||||
"github.com/visualfc/gotools/runcmd"
|
||||
"github.com/visualfc/gotools/types"
|
||||
)
|
||||
|
||||
// init registers every gotools sub-command with the shared command
// framework; command.Main (called from main) dispatches among them.
func init() {
	command.Register(types.Command)
	command.Register(jsonfmt.Command)
	command.Register(finddoc.Command)
	command.Register(runcmd.Command)
	command.Register(docview.Command)
	command.Register(astview.Command)
	command.Register(goimports.Command)
	command.Register(gopresent.Command)
	command.Register(goapi.Command)
	command.Register(pkgs.Command)
	command.Register(oracle.Command)
}
|
||||
|
||||
// main sets the command-line front end's metadata and hands control to the
// command dispatcher, which parses arguments and runs the chosen tool.
func main() {
	command.AppName = "gotools"
	command.AppVersion = "1.0"
	command.AppInfo = "Go tools for liteide."
	command.Main()
}
|
|
@ -2,6 +2,18 @@
|
|||
"comment": "",
|
||||
"ignore": "test",
|
||||
"package": [
|
||||
{
|
||||
"checksumSHA1": "+dVSWmypRy7k2emKXonbGs4aMC0=",
|
||||
"path": "github.com/b3log/wide",
|
||||
"revision": "a399713787402b6b127ad3ed706f86546007fa50",
|
||||
"revisionTime": "2018-03-13T03:25:58Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "83iEp3SqOoIkZUYyR7BOVP4vaGE=",
|
||||
"path": "github.com/bradfitz/goimports",
|
||||
"revision": "919f4f2bcea0744d4da4ae851fbf818ae11cba87",
|
||||
"revisionTime": "2014-12-11T23:42:42Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "9G8Rchzwv9ERNhD09uy7wvmqBJI=",
|
||||
"path": "github.com/go-fsnotify/fsnotify",
|
||||
|
@ -32,6 +44,18 @@
|
|||
"revision": "a91eba7f97777409bc2c443f5534d41dd20c5720",
|
||||
"revisionTime": "2017-03-19T17:27:27Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "5i+5zScV0FNuG3cmRzRPn6PFsfo=",
|
||||
"path": "github.com/nsf/gocode",
|
||||
"revision": "5070dacabf2a80deeaf4ddb0be3761d06fce7be5",
|
||||
"revisionTime": "2016-11-22T21:38:51Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "4fp/TH5nX7seO5p4qJfIj/BokbI=",
|
||||
"path": "github.com/visualfc/gotools",
|
||||
"revision": "b8348693492ca3791bccfa028f3c19634c11c5b5",
|
||||
"revisionTime": "2015-04-09T14:25:36Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "Tkb1hBdBWeO7SGjixS2Hm48F6+s=",
|
||||
"path": "golang.org/x/sys/unix",
|
||||
|
|
Loading…
Reference in New Issue