diff --git a/.ci/static_analysis_check.sh b/.ci/static_analysis_check.sh
index 10fc6d8350..d80b41d881 100755
--- a/.ci/static_analysis_check.sh
+++ b/.ci/static_analysis_check.sh
@@ -17,7 +17,7 @@ find . -not -path "./third_party/*" -a -not -path "./*build*/*" -a \( -iname "*.
isort --version
black --version
pflake8 --version
-for folder in api/http api/python bingo/bingo-elastic/python api/tests/integration utils/indigo-service/service
+for folder in api/http api/python bingo/bingo-elastic/python api/tests/integration utils/indigo-service/backend/service
do
cd ${folder}
isort --check .
diff --git a/.ci/static_analysis_fix.sh b/.ci/static_analysis_fix.sh
index e784bd1400..958cc07809 100755
--- a/.ci/static_analysis_fix.sh
+++ b/.ci/static_analysis_fix.sh
@@ -17,7 +17,7 @@ find . -not -path "./third_party/*" -a -not -path "./*build*/*" -a \( -iname "*.
isort --version
black --version
pflake8 --version
-for folder in api/http api/python bingo/bingo-elastic/python api/tests/integration utils/indigo-service/service
+for folder in api/http api/python bingo/bingo-elastic/python api/tests/integration utils/indigo-service/backend/service
do
cd ${folder}
isort .
diff --git a/.github/workflows/indigo-ci.yaml b/.github/workflows/indigo-ci.yaml
index 8c6919471f..f358b104af 100644
--- a/.github/workflows/indigo-ci.yaml
+++ b/.github/workflows/indigo-ci.yaml
@@ -1154,7 +1154,7 @@ jobs:
name: indigo-python
path: utils/indigo-service/lib/
- name: Build
- run: docker build -f ./utils/indigo-service/service/Dockerfile -t epmlsop/indigo-service:latest ./utils/indigo-service
+ run: docker build -f ./utils/indigo-service/backend/Dockerfile -t epmlsop/indigo-service:latest ./utils/indigo-service/backend
# TODO: add tests
# - name: Test
# run: |
diff --git a/utils/indigo-service-client/Dockerfile b/utils/indigo-service-client/Dockerfile
deleted file mode 100644
index 1bd88253ab..0000000000
--- a/utils/indigo-service-client/Dockerfile
+++ /dev/null
@@ -1,13 +0,0 @@
-FROM node:12-slim
-
-ENV DEBIAN_FRONTEND noninteractive
-
-RUN apt-get update -qq
-
-RUN apt-get install -y --no-install-recommends \
- git
-
-#RUN npm install gulp -g
-
-WORKDIR /code
-
diff --git a/utils/indigo-service-client/LICENSE b/utils/indigo-service-client/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/utils/indigo-service-client/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/utils/indigo-service-client/dev.docker-compose.yml b/utils/indigo-service-client/dev.docker-compose.yml
deleted file mode 100644
index a6166e0883..0000000000
--- a/utils/indigo-service-client/dev.docker-compose.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-env:
- build: ./
- dockerfile: ./Dockerfile
- volumes:
- - ./:/code
- ports:
- - "3000:3000"
- command:
- bash -c "useradd -m -s /bin/bash $USER || true && /bin/su -s /bin/bash -c 'npm dedupe && npm install && gulp serve' $USER"
diff --git a/utils/indigo-service-client/docker-compose.yml b/utils/indigo-service-client/docker-compose.yml
deleted file mode 100644
index 4f0fcc9334..0000000000
--- a/utils/indigo-service-client/docker-compose.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-env:
- build: ./
- dockerfile: ./Dockerfile
- volumes:
- - ./:/code
- tty: false
- # Command contains workaround to set permissions for local user
- command:
- # bash -c "useradd -m -s /bin/bash $USER || true && /bin/su -s /bin/bash -c 'npm install && npm run gulp archive' $USER"
- bash -c "npm install && npm run gulp archive"
\ No newline at end of file
diff --git a/utils/indigo-service-client/script/imago.js b/utils/indigo-service-client/script/imago.js
deleted file mode 100644
index 53dfd8f6e7..0000000000
--- a/utils/indigo-service-client/script/imago.js
+++ /dev/null
@@ -1,86 +0,0 @@
-var m = require('mithril');
-var x = require('./x');
-var e = require('./echo');
-
-var imago_result = {};
-
-var imagoView = function (config) {
- return [
- m('h1', 'Imago'),
- m('h3', 'Please choose PNG or JPG file to Recognize'),
- m('label.upload', {
- title: 'Upload image',
- onchange: upload.bind(null, config.server, config)
- }, m('input[type=file]')),
- imago_result.img_data == null ?
- m('p', 'No Results') :
- m('table', [
- m('thead', m('tr', [
- m('th', 'Loaded Image'),
- m('th', 'Recognized Image')])),
- m('tbody',
- m('tr', [
- m('td', m('img', { src: imago_result.img_data})),
- m('td', imago_result.mol_img == null ?
- m('p','Recognizing...') :
- [
- m('img', { src: imago_result.mol_img}),
- m('p', m('button.transfer', {onclick: imago_result.transfer_cb} , 'Transfer to Ketcher'))
- ]
- )
- ])
- )
- ])
- ];
-};
-
-function upload(server, app, event) {
- var file = event.target.files[0];
-
- var request = server.imagoUpload(file, {
- headers: {
- 'Content-Type': file.type
- },
- background: true
- });
- imago_result.img_data = URL.createObjectURL(file);
- var poll = request.then(function (res) {
- m.redraw('imago');
- return x.pollDeferred(
- server.imagoUploadStatus.bind(server, {
- upload: res.upload_id
- }, null, { background: true }),
- function complete(res) {
- if (res.state === 'FAILURE')
- throw res;
- return res.state === "SUCCESS";
- }, 500, 300);
- });
- poll.then(function(res) {
- console.info('upload completed');
- if (res.state === 'SUCCESS') {
- console.info('SUCCESS');
- imago_result.mol_str = res.metadata.mol_str;
- res = server.render(
- {struct: imago_result.mol_str, output_format: "image/svg+xml"}, {
- headers: { 'Content-Type': 'application/json'}
- });
- res.then(function(data) {
- // console.info(data);
- imago_result.mol_img='data:image/svg+xml;charset=utf-8,' + encodeURIComponent(data);
- });
-
- imago_result.transfer_cb = function() {
- app.ketcher.setMolecule(imago_result.mol_str);
- m.route('search');
- };
- m.redraw('imago');
- }
- return true;
- }, function (res) {
- console.info('upload failed', res);
- e.alertMessage(JSON.stringify(res));
- });
- return false;
-}
-module.exports = imagoView;
diff --git a/utils/indigo-service-client/script/index.js b/utils/indigo-service-client/script/index.js
deleted file mode 100644
index ee97dadfbd..0000000000
--- a/utils/indigo-service-client/script/index.js
+++ /dev/null
@@ -1,77 +0,0 @@
-var m = require('mithril');
-var qs = require('query-string');
-
-var searchView = require('./search');
-var libsView = require('./libs');
-var imagoView = require('./imago');
-var api = require('./api');
-
-var app = {
- version: '__VERSION__',
- api_path: '__API_PATH__',
- libs: [],
- pages: [{
- url: '/search',
- view: searchView,
- title: 'Search'
- }, {
- url: '/libs',
- view: libsView,
- title: 'Libraries'
- }]
-};
-
-app.view = function(page) {
- console.info('redraw', page.url.slice(1));
- return [
- m('nav', [
- m('h1', 'Indigo Online'),
- m('ul', app.pages.map(function (pg) {
- return m('li', { 'class': page.url == pg.url ? 'active': '' },
- m('a', { href: pg.url, config: m.route }, pg.title));
- }))
- ]),
- m('main', { 'class': page.url.slice(1) }, [
- m('iframe', {
- src: '/ketcher/?api_path=/v2',
- onload: function() {
- app.ketcher = this.contentWindow.ketcher;
- // setTimeout(function () {
- // app.ketcher.setMolecule('CCCC');
- // }, 500);
- //app.ketcher.onStructChange()
- }
- }),
- page.view(app)
- ])
- ];
-};
-
-//initialize
-window.onload = function () {
- //document.title += ' v' + app.version;
- var opts = qs.parse(location.search);
- app.api_path = opts.api_path || app.api_path;
- app.server = api(app.api_path);
-
- app.server.libList().then(function (res) {
- res.forEach(function (lib) {
- lib.info = app.server.libInfo({ id: lib.id });
- });
- res.sort(function (a, b) {
- return b.created_timestamp - a.created_timestamp;
- });
- app.libs = res;
- });
-
- m.route.mode = "hash";
- m.route(document.body, '/search', app.pages.reduce(function (res, page) {
- res[page.url] = {
- view: app.view.bind(app, page),
- controller: function () {
- m.redraw.strategy('diff');
- }
- };
- return res;
- }, {}));
-};
diff --git a/utils/indigo-service-client/style/imago.less b/utils/indigo-service-client/style/imago.less
deleted file mode 100644
index df7fd27413..0000000000
--- a/utils/indigo-service-client/style/imago.less
+++ /dev/null
@@ -1,9 +0,0 @@
-img {
- width: 400px;
-}
-
-button.transfer {
- display: block;
- margin-left: auto;
- margin-right: auto;
-}
\ No newline at end of file
diff --git a/utils/indigo-service-uploader/.gitignore b/utils/indigo-service-uploader/.gitignore
deleted file mode 100644
index 1de565933b..0000000000
--- a/utils/indigo-service-uploader/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-target
\ No newline at end of file
diff --git a/utils/indigo-service-uploader/LICENSE b/utils/indigo-service-uploader/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/utils/indigo-service-uploader/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/utils/indigo-service-uploader/README.md b/utils/indigo-service-uploader/README.md
deleted file mode 100644
index bd2ada641d..0000000000
--- a/utils/indigo-service-uploader/README.md
+++ /dev/null
@@ -1,22 +0,0 @@
-# EPAM Indigo projects #
-
-Copyright (c) 2009-2015 EPAM Systems
-
-## Introduction ##
-
-This repository includes:
- * Java sources for the uploader (old)
- * Rust sources (new)
-
-Main directory structure layout:
- * data: data structures
- * java: java sources and tests
- * rust: rust sources and tests
-
-## Source code organization ##
-
-
-## Build instructions ##
-
-    ./run_dc build
-    ./run_dc run --rm env cargo build --release
diff --git a/utils/indigo-service-uploader/java/pom.xml b/utils/indigo-service-uploader/java/pom.xml
deleted file mode 100644
index cf2cd2336a..0000000000
--- a/utils/indigo-service-uploader/java/pom.xml
+++ /dev/null
@@ -1,71 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <groupId>com.epam</groupId>
-    <artifactId>indigo_service_uploader</artifactId>
-    <version>1.0-b</version>
-    <packaging>jar</packaging>
-    <dependencies>
-        <dependency>
-            <groupId>org.postgresql</groupId>
-            <artifactId>postgresql</artifactId>
-            <version>42.3.3</version>
-        </dependency>
-        <dependency>
-            <groupId>com.google.code.gson</groupId>
-            <artifactId>gson</artifactId>
-            <version>2.8.9</version>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <version>4.13.2</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.logging.log4j</groupId>
-            <artifactId>log4j-core</artifactId>
-            <version>2.17.1</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>javax.jms</groupId>
-                    <artifactId>jms</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jdmk</groupId>
-                    <artifactId>jmxtools</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jmx</groupId>
-                    <artifactId>jmxri</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>javax.mail</groupId>
-                    <artifactId>mail</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-    </dependencies>
-    <build>
-        <plugins>
-            <plugin>
-                <artifactId>maven-assembly-plugin</artifactId>
-                <configuration>
-                    <archive>
-                        <manifest>
-                            <mainClass>com.epam.indigo.uploader.IndigoServiceUploader</mainClass>
-                        </manifest>
-                    </archive>
-                    <descriptorRefs>
-                        <descriptorRef>jar-with-dependencies</descriptorRef>
-                    </descriptorRefs>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <maven.compiler.source>1.8</maven.compiler.source>
-        <maven.compiler.target>1.8</maven.compiler.target>
-    </properties>
-</project>
diff --git a/utils/indigo-service-uploader/java/sql/test.sql b/utils/indigo-service-uploader/java/sql/test.sql
deleted file mode 100644
index 974bf55049..0000000000
--- a/utils/indigo-service-uploader/java/sql/test.sql
+++ /dev/null
@@ -1,34 +0,0 @@
-create table test1(m bytea, p jsonb NOT NULL DEFAULT '{}')
-
-select * from test_celgene
-
-select elems->>'idx_k',elems->>'idx_v' from test_celgene,jsonb_array_elements(p) elems where elems->>'idx_k' like '%id%' and (elems->>'idx_v')::float = 976001
-
-explain analyze
-select elems->>'org_k',elems->>'org_v' from test_celgene,jsonb_array_elements(p) elems where elems->>'idx_k' like '%mass%' and (elems->>'idx_v')::float > 300 limit 50 offset 150
-
-select elems->>'idx_k',elems->>'idx_v' from test_celgene,jsonb_array_elements(p) elems where elems->>'idx_k' like '%id%' and jsonb_typeof(elems->'idx_v') = 'number' and (elems->>'idx_v')::float = 976001
-
-select elems
-
-drop index test_celgene_idx
-create index test_celgene_idx on test_celgene USING bingo_idx (m bingo.bmolecule)
-
-select elems->>'org_k',elems->>'org_v' from test_celgene,jsonb_array_elements(p) elems limit 1
-
-
-select elems->>'a' as property ,elems->>'b' as value from test_celgene,jsonb_array_elements(p) elems where elems->>'x' like '%mass%' and jsonb_typeof(elems->'y') = 'number' and (elems->>'y')::float > 300 limit 50 offset 150
-
-select * from bingo.bingo_config
-drop index test_celgene_idx
-create index test_celgene_idx on test_celgene USING bingo_idx (m bingo.bmolecule) with (IGNORE_STEREOCENTER_ERRORS=1,IGNORE_CISTRANS_ERRORS=1,FP_TAU_SIZE=0)
-
-
-
-select count(*) from test
-select * from test limit 10
-drop table test;
-create table test (id serial, m bytea, p jsonb);
-select * from
-
-select * from test_indigo_upload
\ No newline at end of file
diff --git a/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/IndigoServiceUploader.java b/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/IndigoServiceUploader.java
deleted file mode 100644
index 1a759e45a2..0000000000
--- a/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/IndigoServiceUploader.java
+++ /dev/null
@@ -1,262 +0,0 @@
-package com.epam.indigo.uploader;
-
-import org.apache.logging.log4j.LogManager;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.zip.GZIPInputStream;
-
-public class IndigoServiceUploader {
- public static void main(String[] args) {
- if (args.length != 2) {
- System.out.println("Usage: uploader ");
- return;
- }
-
- /*
- * Detect single or multithreaded
- */
- boolean multiThreaded = Runtime.getRuntime().availableProcessors() > 1;
- startUpload(args[0], args[1], multiThreaded);
- }
-
-
- /*
- * Best time
- * 3200 structures per second insert
- * 1500 structures per second insert with indexing
- */
- public static int startUpload(String path, String table_name, boolean multiThreaded) {
-// int max_num = 10000;
- int str_num = 0;
- int max_num = -1;
-
- long total_time = System.currentTimeMillis();
- SqlBatchInserter insert = new SqlBatchInserter(table_name);
- try (InputStream molScanner = new GZIPInputStream(new FileInputStream(path))) {
- if(multiThreaded)
- str_num = insert.processParallel(molScanner, max_num);
- else
- str_num = insert.process(molScanner, max_num);
- } catch (IOException ex) {
- LogManager.getRootLogger().error(ex.getMessage(), ex);
- System.exit(0);
- }
-
-// PostgresEnv.createBingoIndex(table_name);
- total_time = (System.currentTimeMillis() - total_time);
-
- LogManager.getLogger("").info("Insert total time = " + total_time + " ms");
- LogManager.getLogger("").info("Average insert time = " + (int)((double)str_num / total_time * 1000.0) + " structures per second");
- LogManager.getLogger("").info("Total structures processed = " + str_num);
- return str_num;
- }
-
-
-
-// public static void importPG_1() {
-// StringBuilder sqlBuilder = new StringBuilder();
-// sqlBuilder.append("select bingo.getversion()");
-// ResultSet resultSet = PostgresEnv.getStatement().executeQuery(sqlBuilder.toString());
-// int str_num = 0;
-// while (resultSet.next()) {
-// System.out.println(resultSet.getString(1));
-// ++str_num;
-// }
-// String table_name = "test_celgene";
-// // PostgresEnv.dropCreateTable(table_name);
-//
-//// Indigo indigo = new Indigo();
-// int x = 0;
-//// int max_num = 10000;
-// int max_num = -1;
-//
-// long total_time = System.currentTimeMillis();
-// SqlBatchInserter insert = new SqlBatchInserter(table_name);
-// try (FileInputStream molScanner = new FileInputStream("/mnt/ramdisk/tmp/pubchem_100k.sd")) {
-// x = insert.process(molScanner, max_num);
-// } catch (FileNotFoundException ex) {
-// Logger.getRootLogger().error(ex.getMessage(), ex);
-// System.exit(0);
-// } catch (IOException ex) {
-// Logger.getRootLogger().error(ex.getMessage(), ex);
-// System.exit(0);
-// }
-// total_time = (System.currentTimeMillis() - total_time);
-//
-// Logger.getLogger("").info("Insert total time = " + total_time + " ms");
-// Logger.getLogger("").info("Average insert time = " + (int)((double)x / total_time * 1000.0) + " structures per second");
-// Logger.getLogger("").info("Total structures processed = " + x);
-//
-//// start = System.currentTimeMillis();
-//// createBingoIndex(table_name);
-//// Logger.getLogger(IndigoServiceUploader.class).info("Index time = " + (System.currentTimeMillis() - start) + " ms");
-// }
-//
-// private static void addJsonValue(StringBuilder params, String val) {
-// if (val.contains("\n")) {
-// StringTokenizer st = new StringTokenizer(val, "\n");
-// boolean started = false;
-// params.append("[");
-// while (st.hasMoreTokens()) {
-// if (started) {
-// params.append(",");
-// }
-// addJsonSimpleValue(params, st.nextToken());
-// started = true;
-// }
-// params.append("]");
-// } else {
-// addJsonSimpleValue(params, val);
-// }
-// }
-// private static void addJsonSimpleValue(StringBuilder params, String val) {
-// if (isNumeric(val)) {
-// String nVal = val;
-// String tmpVal;
-// while(nVal.startsWith("0") && nVal.length() > 1 && nVal.charAt(1) != '.') {
-// tmpVal = nVal.substring(1);
-// nVal = tmpVal;
-// }
-// params.append(nVal).append("");
-// } else {
-// params.append("\"").append(val.replace("\\", "\\\\").replace("\"", "\\\"").replace("\'", "\\\"")).append("\"");
-// }
-// }
-// private static void addParameters(String mol, StringBuilder params) {
-// params.append("{");
-// try (Scanner pScanner = new Scanner(new ByteArrayInputStream(mol.getBytes()))) {
-// pScanner.useDelimiter(java.util.regex.Pattern.quote("\n> <"));
-// /*
-// * Skip first
-// */
-// if (pScanner.hasNext()) {
-// pScanner.next();
-// }
-//
-// while (pScanner.hasNext()) {
-// String[] pLines = pScanner.next().trim().split("\n");
-// if(pLines.length > 0) {
-// String p_name = pLines[0].substring(0, pLines[0].lastIndexOf(">"));
-//
-// if (params.length() > 1) {
-// params.append(",");
-// }
-// params.append("\"").append(p_name).append("\":");
-//
-// addJsonValues(params, pLines);
-//
-// }
-// }
-// }
-// params.append("}");
-// }
-// private static void testProps() {int x = 0;
-// int max_num = 1;
-// int batch_size = 200;
-// long batch_time = 0, other_time = 0, start, other = 0;
-//
-// start = System.currentTimeMillis();
-// ByteArrayOutputStream buf = new ByteArrayOutputStream();
-// StringBuilder params = new StringBuilder();
-//
-// try (Scanner molScanner = new Scanner(new BufferedReader(new FileReader("/mnt/ramdisk/tmp/pubchem_100k.sd")))) {
-// molScanner.useDelimiter(java.util.regex.Pattern.quote("$$$$\n"));
-// while (molScanner.hasNext()) {
-// String mol = molScanner.next();
-//// for (IndigoObject mol : indigo.iterateSDFile("/mnt/ramdisk/tmp/pubchem_100k.sd")) {
-// other = System.currentTimeMillis();
-// buf.reset();
-// params.delete(0, params.length());
-// try (OutputStreamWriter sd = new OutputStreamWriter(new GZIPOutputStream(buf))) {
-//// try (OutputStreamWriter sd = new OutputStreamWriter(buf)){
-// sd.append(mol);
-// } catch (IOException ex) {
-//// Logger.getLogger(IndigoTools.class.getName()).log(Level.SEVERE, null, ex);
-// }
-// addParameters(mol, params);
-//
-// System.out.println(params.toString());
-//// System.out.println(mol);
-// if (x++ > max_num) {
-// break;
-// }
-// }
-// } catch (FileNotFoundException ex) {
-//// Logger.getLogger(IndigoTools.class.getName()).log(Level.SEVERE, null, ex);
-// }
-// }
-// private static void addJsonValues(StringBuilder params, String[] pLines) {
-// if (pLines.length > 2) {
-// params.append("[");
-// for(int i = 2; i < pLines.length; ++i) {
-// if (i > 2) {
-// params.append(",");
-// }
-// addJsonSimpleValue(params, pLines[i]);
-// }
-// params.append("]");
-// } else {
-// if(pLines.length > 1) {
-// addJsonSimpleValue(params, pLines[1]);
-// } else {
-// params.append("");
-// }
-// }
-// }
-// private static void addParameters(Properties props, StringBuilder params) {
-// params.append("{");
-// for (String p_name : props.stringPropertyNames()) {
-// if (params.length() > 1) {
-// params.append(",");
-// }
-// params.append("\"").append(p_name).append("\":");
-// addJsonSimpleValue(params, props.getProperty(p_name));
-// }
-// params.append("}");
-// }
- // public static void selectSubSet() {
-// Indigo indigo = new Indigo();
-// int x = 0;
-// int max_num = 10000;
-// try (BufferedWriter sd = new BufferedWriter(new OutputStreamWriter(new FileOutputStream("/mnt/ramdisk/tmp/pubchem_10k.sd")))) {
-// for (IndigoObject mol : indigo.iterateSDFile("/mnt/ramdisk/tmp/pubchem_1M_sd.gz")) {
-// sd.append(mol.rawData());
-// sd.newLine();
-// sd.append("$$$$");
-// sd.newLine();
-// if (x++ > max_num) {
-// break;
-// }
-// }
-// } catch (FileNotFoundException ex) {
-// Logger.getLogger(IndigoTools.class.getName()).log(Level.SEVERE, null, ex);
-// } catch (IOException ex) {
-// Logger.getLogger(IndigoTools.class.getName()).log(Level.SEVERE, null, ex);
-// }
-// }
-// public static void convertToOneGz() {
-// Indigo indigo = new Indigo();
-//
-// String sd_path = "/mnt/ramdisk/tmp/PubChem.gz/";
-// try (BufferedWriter gz = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(sd_path + "../pubchem_1M_sd.gz"))))) {
-// File f = new File(sd_path);
-// for (File fs : f.listFiles()) {
-// System.out.println("Process " + fs.getAbsolutePath());
-// for (IndigoObject mol : indigo.iterateSDFile(fs.getAbsolutePath())) {
-// gz.append(mol.rawData());
-// gz.newLine();
-// gz.append("$$$$");
-// gz.newLine();
-// }
-// }
-// } catch (FileNotFoundException ex) {
-// Logger.getLogger(IndigoTools.class.getName()).log(Level.SEVERE, null, ex);
-// } catch (IOException ex) {
-// Logger.getLogger(IndigoTools.class.getName()).log(Level.SEVERE, null, ex);
-// }
-// }
-}
-
-
diff --git a/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/JsonParser.java b/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/JsonParser.java
deleted file mode 100644
index 73be7238c5..0000000000
--- a/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/JsonParser.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package com.epam.indigo.uploader;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonPrimitive;
-import java.util.Properties;
-import java.util.regex.Pattern;
-import org.apache.logging.log4j.LogManager;
-
-/**
- * Flattens SD-record properties into a JSON array: "x"/"y" hold the lower-cased key and typed value (for indexed search), "a"/"b" the original key and value.
- */
-public class JsonParser {
-
- private final Gson gson = new Gson();
- private final Pattern numberPattern = Pattern.compile("[-]?[0-9]*\\.?[0-9]+");
-
- public JsonParser() {
- }
-
- public boolean isNumeric(String str) {
- return numberPattern.matcher(str).matches();
- }
-
- public static double getNumber(String str) throws NumberFormatException {
- return Double.parseDouble(str);
- }
-
- public JsonElement getPropertValue(String val) {
- if(isNumeric(val)) {
- try {
- return new JsonPrimitive(getNumber(val));
- } catch (NumberFormatException e) {
- LogManager.getLogger(JsonParser.class.getName()).warn(e.getMessage());
- }
- }
-
- return new JsonPrimitive(val.toLowerCase());
- }
-
- public String parseParametersIntoJson(Properties props) {
- JsonArray jsonProps = new JsonArray();
- for (String p_name : props.stringPropertyNames()) {
- String p_val = props.getProperty(p_name);
- JsonObject elem = new JsonObject();
-
- elem.addProperty("x", p_name.trim().toLowerCase());
- elem.add("y", getPropertValue(p_val));
-
- elem.addProperty("a", p_name);
- elem.addProperty("b", p_val);
-
- jsonProps.add(elem);
- }
- return gson.toJson(jsonProps);
- }
-// public String parseParametersToJson(Gson gson, Properties props) {
-// JsonArray jsonProps = new JsonArray();
-// for (String p_name : props.stringPropertyNames()) {
-// String p_val = props.getProperty(p_name);
-// JsonObject elem = new JsonObject();
-//
-// elem.addProperty("idx_k", p_name.trim().toLowerCase());
-// elem.add("idx_v", getPropertValue(p_val));
-//
-// elem.addProperty("org_k", p_name);
-// elem.addProperty("org_v", p_val);
-//
-// jsonProps.add(elem);
-// }
-// return gson.toJson(jsonProps);
-// }
-}
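
Reviewer note: the deleted `JsonParser` emits one four-field JSON object per SD property, with `x`/`y` carrying the lower-cased key and typed (numeric where possible) value for indexed search, and `a`/`b` preserving the originals. A minimal sketch of that round trip, assuming the class and its Gson dependency as deleted above (`JsonParserDemo` is a hypothetical demo class):

```java
import java.util.Properties;
import com.epam.indigo.uploader.JsonParser;

public class JsonParserDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("PUBCHEM_MOLECULAR_WEIGHT", "194.19");

        // Numeric strings become JSON numbers under "y"; the original
        // key/value pair is kept verbatim under "a"/"b".
        System.out.println(new JsonParser().parseParametersIntoJson(props));
        // Expected output, approximately:
        // [{"x":"pubchem_molecular_weight","y":194.19,
        //   "a":"PUBCHEM_MOLECULAR_WEIGHT","b":"194.19"}]
    }
}
```
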
diff --git a/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/PostgresEnv.java b/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/PostgresEnv.java
deleted file mode 100644
index 8f9a9adc27..0000000000
--- a/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/PostgresEnv.java
+++ /dev/null
@@ -1,89 +0,0 @@
-package com.epam.indigo.uploader;
-
-import org.apache.logging.log4j.LogManager;
-
-import java.io.IOException;
-import java.sql.*;
-import java.util.Properties;
-
-
-public class PostgresEnv {
-
- public static void dropCreateTable(String table_name) {
- try {
- PostgresEnv.getStatement().executeUpdate("DROP TABLE IF EXISTS " + table_name);
- PostgresEnv.getStatement().executeUpdate("CREATE TABLE " + table_name + "(s serial, m bytea, p jsonb NOT NULL DEFAULT '{}')");
- } catch (SQLException ex) {
- LogManager.getLogger("PostgresEnv").error("Couldn't create a table!", ex);
- }
- }
-
- public static void createBingoIndex(String table_name) {
- String index_name = table_name + "_idx";
- try {
- getStatement().executeUpdate("CREATE INDEX " + index_name + " ON " + table_name + " USING bingo_idx (m bingo.bmolecule) with (IGNORE_STEREOCENTER_ERRORS=1,IGNORE_CISTRANS_ERRORS=1,FP_TAU_SIZE=0)");
- } catch (SQLException ex) {
- LogManager.getLogger("PostgresEnv").error("Couldn't create an index!", ex);
- }
- }
- private Connection _connection = null;
- private Properties _parameters = null;
-
-
- private PostgresEnv() {
- try {
- Class.forName("org.postgresql.Driver");
- } catch (ClassNotFoundException cnfe) {
- LogManager.getLogger("PostgresEnv").error("Couldn't find the driver!", cnfe);
- System.out.println("PostgreSQL driver not found");
- System.exit(2);
- }
-
- LogManager.getLogger("PostgresEnv").info("Registered the driver OK.");
-
- try {
- _parameters = new Properties();
- _parameters.loadFromXML(getClass().getResourceAsStream("/database.xml"));
- _connection = DriverManager.getConnection(_parameters.getProperty("db_url"), _parameters);
- } catch (IOException | SQLException se) {
- LogManager.getLogger(PostgresEnv.class).error("Couldn't connect: print out a stack trace and exit.", se);
- System.exit(1);
- }
-
- if (_connection != null) {
- LogManager.getLogger(PostgresEnv.class.getName()).info("Successfully connected to a database");
- } else {
- LogManager.getLogger(PostgresEnv.class.getName()).error("We should never get here.");
- }
- }
-
-
- public static String getSchemaName() {
- return getInstance()._parameters.getProperty("schema_name");
- }
- public static String getDataDir() {
- return getInstance()._parameters.getProperty("data_dir");
- }
-
- public static PostgresEnv getInstance() {
- return PostgresEnvHolder.INSTANCE;
- }
-
- public static Connection getConnection() {
- return getInstance()._connection;
- }
-
- public static Statement getStatement() {
- Statement res = null;
- try {
- res = getInstance()._connection.createStatement();
- } catch (SQLException ex) {
- LogManager.getLogger(PostgresEnv.class.getName()).error(ex);
- }
- return res;
- }
-
- private static class PostgresEnvHolder {
- private static final PostgresEnv INSTANCE = new PostgresEnv();
- }
-}
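
Reviewer note: `PostgresEnv` loads `/database.xml` (java.util.Properties XML format, via `loadFromXML`) and hands the whole property set to `DriverManager.getConnection`, so the JDBC driver picks up `user`/`password` while `db_url`, `schema_name`, and `data_dir` are read explicitly in code. A sketch of that contract (`DatabaseXmlDemo` is hypothetical; the `user`/`password` key names are inferred standard JDBC keys, while `db_url`/`schema_name` appear verbatim in the class above):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

public class DatabaseXmlDemo {
    public static void main(String[] args) throws Exception {
        Properties p = new Properties();
        p.loadFromXML(DatabaseXmlDemo.class.getResourceAsStream("/database.xml"));

        // "user"/"password" are standard JDBC keys read by the driver;
        // PostgresEnv itself only reads "db_url", "schema_name", "data_dir".
        try (Connection c = DriverManager.getConnection(p.getProperty("db_url"), p)) {
            System.out.println("connected: " + !c.isClosed());
        }
    }
}
```
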
diff --git a/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/SdfIterator.java b/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/SdfIterator.java
deleted file mode 100644
index 81398581fa..0000000000
--- a/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/SdfIterator.java
+++ /dev/null
@@ -1,124 +0,0 @@
-package com.epam.indigo.uploader;
-
-import java.io.BufferedInputStream;
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.util.Iterator;
-import java.util.Properties;
-import java.util.Scanner;
-
-
-public class SdfIterator implements Iterable<SdfIterator.SDItem> {
-
- static SDItem createItem(String str) {
- return new SDItem(str);
- }
- private final Scanner _molScanner;
- private String _nextElement;
-
- public Iterable<String> delimIter() {
- return () -> {
- _readNextElement();
- return new Iterator<String>() {
-
- @Override
- public boolean hasNext() {
- return (_nextElement != null);
- }
-
- @Override
- public String next() {
- String result = _nextElement;
- _readNextElement();
- return result;
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
- };
- };
- }
-
- public static class SDItem {
- public String mol = "";
- public final Properties props = new Properties();
- public static final String SD_PROPERTIES = "\\Q\n>\\E.*\\Q<\\E";
- public byte[] buf;
- public Object jsonObject;
-
- private SDItem(String mol) {
- try (Scanner pScanner = new Scanner(new ByteArrayInputStream(mol.getBytes()))) {
- pScanner.useDelimiter(SD_PROPERTIES);
- /*
- * Read molfile
- */
- if (pScanner.hasNext()) {
- this.mol = pScanner.next();
- }
- /*
- * Read properties
- */
- while (pScanner.hasNext()) {
- String propList = pScanner.next().trim();
- String p_name, p_val = "";
-
- if(propList.length() > 0) {
- int key_idx = propList.indexOf("\n");
- if(key_idx == -1) {
- key_idx = propList.length();
- }
- p_name = propList.substring(0, key_idx);
- int k_idx = p_name.lastIndexOf(">");
- if(k_idx > 0 ) {
- p_name = p_name.substring(0, k_idx);
- }
-
- if(key_idx < propList.length() - 1) {
- p_val = propList.substring(key_idx+1);
- }
- props.setProperty(p_name, p_val);
- }
- }
- }
- }
- }
-
- public SdfIterator(InputStream str) {
- _molScanner = new Scanner(new BufferedInputStream(str));
- _molScanner.useDelimiter(java.util.regex.Pattern.quote("$$$$") + "((\\r\\n)|(\\n))");
- }
- void _readNextElement() {
- _nextElement = null;
- if(_molScanner.hasNext()) {
- String mol = _molScanner.next();
- if(mol.trim().length() > 0) {
- _nextElement = mol;
- }
- }
- }
- @Override
- public Iterator<SDItem> iterator() {
- _readNextElement();
- return new Iterator<SDItem>() {
- @Override
- public boolean hasNext() {
- return (_nextElement != null);
- }
- @Override
- public SDItem next() {
- String result = _nextElement;
- _readNextElement();
- if(result != null)
- return new SDItem(result);
- return null;
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
- };
- }
-}
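
Reviewer note: `SdfIterator` splits an SD stream on `$$$$` record separators and parses each trailing `> <NAME>` block into `java.util.Properties`, which is what the record/property counts asserted in the tests below exercise. A minimal usage sketch (`SdfIteratorDemo` and the input path are placeholders, not files from this repo):

```java
import java.io.FileInputStream;
import java.io.IOException;
import com.epam.indigo.uploader.SdfIterator;

public class SdfIteratorDemo {
    public static void main(String[] args) throws IOException {
        try (FileInputStream in = new FileInputStream("molecules.sdf")) {
            for (SdfIterator.SDItem item : new SdfIterator(in)) {
                // item.mol is the raw molfile, item.props the parsed SD tags
                System.out.println(item.props.size() + " properties, "
                        + item.mol.length() + " molfile chars");
            }
        }
    }
}
```
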
diff --git a/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/SqlBatchInserter.java b/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/SqlBatchInserter.java
deleted file mode 100644
index 6cbb7b7df4..0000000000
--- a/utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/SqlBatchInserter.java
+++ /dev/null
@@ -1,202 +0,0 @@
-package com.epam.indigo.uploader;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStreamWriter;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.AbstractQueue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingDeque;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.zip.GZIPOutputStream;
-
-import org.apache.logging.log4j.LogManager;
-import org.postgresql.util.PGobject;
-
-
-public class SqlBatchInserter {
-
- private final String _tableName;
- public static final int BATCH_SIZE = 200;
-
- public SqlBatchInserter(String table_name) {
- this._tableName = table_name;
- }
-
-
- private static class SDWorker implements Runnable {
- private static final ThreadLocal<JsonParser> parser = new ThreadLocal<>();
- private final AbstractQueue<SdfIterator.SDItem> sdQueue;
- private final String str;
-
- private SDWorker(AbstractQueue<SdfIterator.SDItem> sdQueue, String str) {
- this.sdQueue = sdQueue;
- this.str = str;
- }
-
- @Override
- public void run() {
- ByteArrayOutputStream buf = new ByteArrayOutputStream();
- if(parser.get() == null) {
- parser.set(new JsonParser());
- }
- try {
- SdfIterator.SDItem sdItem = SdfIterator.createItem(str);
- try (OutputStreamWriter sd = new OutputStreamWriter(new GZIPOutputStream(buf))) {
- sd.append(sdItem.mol);
- } catch (IOException ex) {
- LogManager.getRootLogger().error(ex.getMessage(), ex);
- }
- String paramsJson = parser.get().parseParametersIntoJson(sdItem.props);
- PGobject dataObject = new PGobject();
- dataObject.setType("jsonb");
- dataObject.setValue(paramsJson);
-
- sdItem.jsonObject = dataObject;
- sdItem.buf = buf.toByteArray();
-
- sdQueue.add(sdItem);
-
- } catch (SQLException ex) {
- }
- }
-
- }
-
- private class PgBatchInserter implements Runnable {
- private final AtomicBoolean allProcessed;
- private final AtomicInteger numberProcesed;
- private final LinkedBlockingDeque<SdfIterator.SDItem> sdQueue;
-
- private PgBatchInserter(AtomicBoolean allProcessed, AtomicInteger numberProcesed, LinkedBlockingDeque<SdfIterator.SDItem> sdQueue) {
- this.allProcessed = allProcessed;
- this.numberProcesed = numberProcesed;
- this.sdQueue = sdQueue;
- }
- @Override
- public void run() {
- int localProcessed = 0;
- try (PreparedStatement ps = PostgresEnv.getConnection().prepareStatement("INSERT INTO " + _tableName + "(m,p) VALUES (?,?)")) {
- while (!Thread.currentThread().isInterrupted() && (!allProcessed.get() || (numberProcesed.get() != localProcessed))) {
- try {
- SdfIterator.SDItem sd = sdQueue.takeFirst();
- ps.setBytes(1, sd.buf);
- ps.setObject(2, sd.jsonObject);
- ps.addBatch();
-
- localProcessed++;
- if (localProcessed % BATCH_SIZE == 0) {
- ps.executeBatch();
- }
- } catch (InterruptedException ex) {
- }
- }
- ps.executeBatch();
- } catch (SQLException ex) {
- LogManager.getRootLogger().error(ex.getMessage(), ex);
- }
- }
-
- }
-
- int process(InputStream molScanner) {
- return process(molScanner, -1);
- }
- int processParallel(InputStream molScanner) {
- return processParallel(molScanner, -1);
- }
- int processParallel(InputStream molScanner, int max_num) {
- try {
- int iter_num = 0;
-
- final AtomicBoolean allProcessed = new AtomicBoolean(false);
- final AtomicInteger numberProcesed = new AtomicInteger(0);
-
- final LinkedBlockingDeque<SdfIterator.SDItem> sdQueue = new LinkedBlockingDeque<>();
-
- Thread sqlInserter = new Thread(new PgBatchInserter(allProcessed, numberProcesed, sdQueue));
-
- sqlInserter.start();
-// ExecutorService exec = Executors.newCachedThreadPool();
- ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()) ;
- SdfIterator sdIter = new SdfIterator(molScanner);
- for (String str : sdIter.delimIter()) {
- exec.submit(new SDWorker(sdQueue, str));
- iter_num++;
- if(iter_num % 1000 == 0) {
- LogManager.getRootLogger().info("Processed " + iter_num);
- }
- if (max_num > 0 && iter_num > max_num) {
- break;
- }
- }
- numberProcesed.compareAndSet(0, iter_num);
- allProcessed.compareAndSet(false, true);
- sqlInserter.join();
-
- exec.shutdown();
- while (!exec.isTerminated()) {
- }
-
- return iter_num;
- } catch (InterruptedException ex) {
- }
- return 0;
- }
-
-
- int process(InputStream molScanner, int max_num) {
- ByteArrayOutputStream buf = new ByteArrayOutputStream();
- StringBuilder params = new StringBuilder();
- JsonParser parser = new JsonParser();
-
- long other, other_time = 0, batch_time = 0;
- int iter_num = 0;
- try (PreparedStatement ps = PostgresEnv.getConnection().prepareStatement("INSERT INTO " + _tableName + "(m,p) VALUES (?,?)")) {
- SdfIterator sdIter = new SdfIterator(molScanner);
- for (SdfIterator.SDItem str : sdIter) {
- other = System.currentTimeMillis();
- buf.reset();
- params.delete(0, params.length());
- try (OutputStreamWriter sd = new OutputStreamWriter(new GZIPOutputStream(buf))) {
- sd.append(str.mol);
- } catch (IOException ex) {
- LogManager.getRootLogger().error(ex.getMessage(), ex);
- }
- params.append(parser.parseParametersIntoJson(str.props));
-
- PGobject dataObject = new PGobject();
- dataObject.setType("jsonb");
- dataObject.setValue(params.toString());
-
- other_time += (System.currentTimeMillis() - other);
-
- ps.setBytes(1, buf.toByteArray());
- ps.setObject(2, dataObject);
- ps.addBatch();
- if (iter_num % BATCH_SIZE == 0) {
- long batch_start = System.currentTimeMillis();
- ps.executeBatch();
- batch_time += (System.currentTimeMillis() - batch_start);
- LogManager.getLogger("").info("Processed " + iter_num);
- }
- iter_num++;
- if (max_num > 0 && iter_num > max_num) {
- break;
- }
- }
- ps.executeBatch();
- } catch (SQLException ex) {
- LogManager.getRootLogger().error(ex.getMessage(), ex);
- }
-
- LogManager.getLogger("").info("Batch time = " + batch_time + " ms");
- LogManager.getLogger("").info("Other time = " + other_time + " ms");
- return iter_num;
- }
-
-}
diff --git a/utils/indigo-service-uploader/java/src/main/resources/database.xml b/utils/indigo-service-uploader/java/src/main/resources/database.xml
deleted file mode 100644
index de2c8b297e..0000000000
--- a/utils/indigo-service-uploader/java/src/main/resources/database.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE properties SYSTEM "http://java.sun.com/dtd/properties.dtd">
-<properties>
-    <comment>postgres database parameters</comment>
-    <entry key="user">indigoservice</entry>
-    <entry key="password">p@ssw0rd</entry>
-    <entry key="url">jdbc:postgresql://indigo_db/indigoservice</entry>
-    <entry key="schema">indigoservice</entry>
-</properties>
diff --git a/utils/indigo-service-uploader/java/src/main/resources/log4j.xml b/utils/indigo-service-uploader/java/src/main/resources/log4j.xml
deleted file mode 100644
index 143b9bb209..0000000000
--- a/utils/indigo-service-uploader/java/src/main/resources/log4j.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/utils/indigo-service-uploader/java/src/test/java/com/epam/indigo/uploader/IndigoServiceUploaderTest.java b/utils/indigo-service-uploader/java/src/test/java/com/epam/indigo/uploader/IndigoServiceUploaderTest.java
deleted file mode 100644
index a67ec86a0f..0000000000
--- a/utils/indigo-service-uploader/java/src/test/java/com/epam/indigo/uploader/IndigoServiceUploaderTest.java
+++ /dev/null
@@ -1,260 +0,0 @@
-package com.epam.indigo.uploader;
-
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.zip.GZIPInputStream;
-import static org.junit.Assert.*;
-import org.junit.*;
-import org.junit.rules.TestName;
-
-
-
-public class IndigoServiceUploaderTest {
- public static final String TEST_SCHEME = "test_upload";
-
- @Rule public final TestName name = new TestName();
-
-// @BeforeClass
-// public static void init() throws SQLException {
-// PostgresEnv.getStatement().executeUpdate("DROP SCHEMA IF EXISTS " + TEST_SCHEME + " CASCADE");
-// PostgresEnv.getStatement().executeUpdate("CREATE SCHEMA " + TEST_SCHEME);
-// }
-
- public IndigoServiceUploaderTest() {
- }
-
- @Test
- public void testDatabaseBingoVersion() throws SQLException {
- String sqlBuilder = "select bingo.getversion()";
- ResultSet resultSet = PostgresEnv.getStatement().executeQuery(sqlBuilder);
- int str_num = 0;
- while (resultSet.next()) {
- ++str_num;
- }
- assertEquals(1, str_num);
- }
-
- @Test
- public void testBasicSDIterator() throws FileNotFoundException {
- SdfIterator sd = new SdfIterator(new FileInputStream("data/test_pubchem_10.sdf"));
- int x = 0, p=0;
-
- for(SdfIterator.SDItem str : sd) {
- ++x;
- p += str.props.size();
- }
- assertEquals(10, x);
- assertEquals(325, p);
- }
- @Test
- public void testBasicSDScope() throws IOException {
- try (InputStream test = new GZIPInputStream(new FileInputStream("data/test-18.sd.gz"))) {
- SdfIterator sd = new SdfIterator(test);
- int x = 0, p = 0;
- String last_mol = "";
-
- for (SdfIterator.SDItem str : sd) {
- ++x;
- p += str.props.size();
- last_mol = str.mol;
- }
- assertEquals(18, x);
- assertEquals(576, p);
- assertTrue(last_mol.startsWith("StarDropID 3"));
- }
- try (InputStream test = new GZIPInputStream(new FileInputStream("data/test-108.sd.gz"))) {
- SdfIterator sd = new SdfIterator(test);
- int x = 0, p = 0;
-
- for (SdfIterator.SDItem str : sd) {
- ++x;
- p += str.props.size();
- }
- assertEquals(108, x);
- assertEquals(3456, p);
- }
- try (InputStream test = new GZIPInputStream(new FileInputStream("data/test-2759.sd.gz"))) {
- SdfIterator sd = new SdfIterator(test);
- int x = 0, p = 0;
-
- for (SdfIterator.SDItem str : sd) {
- ++x;
- p += str.props.size();
- }
- assertEquals(2759, x);
- assertEquals(8277, p);
- }
- }
-
- @Test
- public void testSDInsertBasic() throws IOException, SQLException {
- String table_name = TEST_SCHEME + "." + name.getMethodName();
- PostgresEnv.dropCreateTable(table_name);
- SqlBatchInserter insert = new SqlBatchInserter(table_name);
- try (FileInputStream molScanner = new FileInputStream("data/test_pubchem_10.sdf")) {
- insert.process(molScanner);
- }
-
- String sqlBuilder = "select elems->>'a',elems->>'b' from " +
- table_name +
- ",jsonb_array_elements(p) elems where elems->>'x' like '%mass%' and (elems->>'y')::float > 300";
-
- ResultSet resultSet = PostgresEnv.getStatement().executeQuery(sqlBuilder);
- int str_num = 0;
- while (resultSet.next()) {
- ++str_num;
- }
- assertEquals(6, str_num);
- }
-
- @Test
- public void testSDInsertParallelBasic() throws IOException, SQLException {
- String table_name = TEST_SCHEME + "." + name.getMethodName();
- PostgresEnv.dropCreateTable(table_name);
- SqlBatchInserter insert = new SqlBatchInserter(table_name);
- try (FileInputStream molScanner = new FileInputStream("data/test_pubchem_10.sdf")) {
- insert.processParallel(molScanner);
- }
-
- String sqlBuilder = "select elems->>'a',elems->>'b' from " +
- table_name +
- ",jsonb_array_elements(p) elems where elems->>'x' like '%mass%' and (elems->>'y')::float > 300";
-
- ResultSet resultSet = PostgresEnv.getStatement().executeQuery(sqlBuilder);
- int str_num = 0;
- String last_key = "";
- String last_val = "";
- while (resultSet.next()) {
- ++str_num;
- last_key = resultSet.getString(1);
- last_val = resultSet.getString(2);
- }
- assertEquals(6, str_num);
- assertEquals("PUBCHEM_EXACT_MASS", last_key);
- }
- @Test
- public void testSDInsertMaybridge() throws IOException, SQLException {
- String table_name = TEST_SCHEME + "." + name.getMethodName();
- PostgresEnv.dropCreateTable(table_name);
- SqlBatchInserter insert = new SqlBatchInserter(table_name);
- try (GZIPInputStream molScanner = new GZIPInputStream(new FileInputStream("data/maybridge-stardrop-sample.sd.gz"))) {
- insert.processParallel(molScanner);
- }
-
- String sqlBuilder = "select elems->>'a',elems->>'b' from " +
- table_name +
- ",jsonb_array_elements(p) elems where elems->>'x' like '%logp%' and (elems->>'y')::float > 5";
-
- ResultSet resultSet = PostgresEnv.getStatement().executeQuery(sqlBuilder);
- int str_num = 0;
- String last_key = "";
- String last_val = "";
- while (resultSet.next()) {
- ++str_num;
- last_key = resultSet.getString(1);
- last_val = resultSet.getString(2);
- }
- assertEquals(16, str_num);
- assertEquals("logP", last_key);
- resultSet = PostgresEnv.getStatement().executeQuery("select * from " + table_name);
- str_num = 0;
- while (resultSet.next()) {
- ++str_num;
- }
- assertEquals(108, str_num);
- }
- @Test
- public void testSDInsertParallelCorrect18() throws IOException, SQLException {
- String table_name = TEST_SCHEME + "." + name.getMethodName();
- PostgresEnv.dropCreateTable(table_name);
- SqlBatchInserter insert = new SqlBatchInserter(table_name);
- try (GZIPInputStream molScanner = new GZIPInputStream(new FileInputStream("data/test-18.sd.gz"))) {
- insert.processParallel(molScanner);
- }
-
- String sqlBuilder = "select bingo.checkmolecule(m) from " +
- table_name;
-
- ResultSet resultSet = PostgresEnv.getStatement().executeQuery(sqlBuilder);
- int str_num = 0;
- String last_key;
- while (resultSet.next()) {
- ++str_num;
- last_key = resultSet.getString(1);
- assertNull(last_key);
- }
- assertEquals(18, str_num);
- }
- @Test
- public void testSDFloatNumbers() throws IOException, SQLException {
- String table_name = TEST_SCHEME + "." + name.getMethodName();
- PostgresEnv.dropCreateTable(table_name);
- SqlBatchInserter insert = new SqlBatchInserter(table_name);
- try (FileInputStream molScanner = new FileInputStream("data/test-18-floats.sdf")) {
- insert.processParallel(molScanner);
- }
- {
- String sqlBuilder = "select elems->>'a',elems->>'b' from " +
- table_name +
- ",jsonb_array_elements(p) elems where elems->>'x' like '%logs%' and (elems->>'y')::float > 0.5";
-
- ResultSet resultSet = PostgresEnv.getStatement().executeQuery(sqlBuilder);
- int str_num = 0;
- int contains = 0;
- while (resultSet.next()) {
- ++str_num;
- contains += resultSet.getString(2).contains("777") ? 1 : 0;
- }
- assertEquals(36, str_num);
- assertEquals(1, contains);
- }
- {
- String sqlBuilder = "select elems->>'a',elems->>'b' from " +
- table_name +
- ",jsonb_array_elements(p) elems where elems->>'x' like '%logs%' and (elems->>'y')::float > 1";
-
- ResultSet resultSet = PostgresEnv.getStatement().executeQuery(sqlBuilder);
- int str_num = 0;
- int contains = 0;
- while (resultSet.next()) {
- ++str_num;
- contains += resultSet.getString(2).contains("777") ? 1 : 0;
- }
- assertEquals(26, str_num);
- assertEquals(0, contains);
- }
- }
-// @Test
-// public void testSDInsertSchrodinger() throws FileNotFoundException, IOException, SQLException {
-// String table_name = "test_unit_schrodinger";
-// PostgresEnv.dropCreateTable(table_name);
-// SqlBatchInserter insert = new SqlBatchInserter(table_name);
-// try (GZIPInputStream molScanner = new GZIPInputStream(new FileInputStream("data/test_from_schrodinger.sd.gz"))) {
-// insert.processParallel(molScanner);
-// }
-//
-// PostgresEnv.createBingoIndex(table_name);
-//
-// StringBuilder sqlBuilder = new StringBuilder();
-// sqlBuilder.append("select elems->>'a',elems->>'b' from ")
-// .append(table_name)
-// .append(",jsonb_array_elements(p) elems where elems->>'x' like '%flexibility%' and (elems->>'y')::float > 0.3");
-//
-// ResultSet resultSet = PostgresEnv.getStatement().executeQuery(sqlBuilder.toString());
-// int str_num = 0;
-// String last_key = "";
-// String last_val = "";
-// while (resultSet.next()) {
-// ++str_num;
-// last_key = resultSet.getString(1);
-// last_val = resultSet.getString(2);
-// }
-// assertEquals(2, str_num);
-// assertEquals("Flexibility", last_key);
-// }
-
-}
diff --git a/utils/indigo-service-uploader/rust/Cargo.lock b/utils/indigo-service-uploader/rust/Cargo.lock
deleted file mode 100644
index 577344e80e..0000000000
--- a/utils/indigo-service-uploader/rust/Cargo.lock
+++ /dev/null
@@ -1,255 +0,0 @@
-[root]
-name = "indigo_uploader"
-version = "0.1.1"
-dependencies = [
- "flate2 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
- "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "num_cpus 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "postgres 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "postgres-binary-copy 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 0.1.47 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
- "threadpool 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
- "yaml-rust 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "advapi32-sys"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "aho-corasick"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "memchr 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "bufstream"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "byteorder"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "cfg-if"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "flate2"
-version = "0.2.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "gcc"
-version = "0.3.21"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "getopts"
-version = "0.2.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "kernel32-sys"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "libc"
-version = "0.2.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "log"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "memchr"
-version = "0.1.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "miniz-sys"
-version = "0.1.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "gcc 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "net2"
-version = "0.2.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num_cpus"
-version = "0.2.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "phf"
-version = "0.7.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "phf_shared 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "phf_codegen"
-version = "0.7.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "phf_generator 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "phf_shared 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "phf_generator"
-version = "0.7.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "phf_shared 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "phf_shared"
-version = "0.7.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "postgres"
-version = "0.10.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "bufstream 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "byteorder 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "net2 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "phf 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "phf_codegen 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "postgres-binary-copy"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "byteorder 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "postgres 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "rand"
-version = "0.3.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "regex"
-version = "0.1.47"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "aho-corasick 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "regex-syntax"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "rustc-serialize"
-version = "0.3.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "threadpool"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "time"
-version = "0.1.34"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "winapi"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "winapi-build"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "ws2_32-sys"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "yaml-rust"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
diff --git a/utils/indigo-service-uploader/rust/Cargo.toml b/utils/indigo-service-uploader/rust/Cargo.toml
deleted file mode 100644
index 51e3869291..0000000000
--- a/utils/indigo-service-uploader/rust/Cargo.toml
+++ /dev/null
@@ -1,26 +0,0 @@
-[package]
-name = "indigo_uploader"
-version = "0.1.1"
-authors = [ "Savelev_Aleksandr@epam.com" ]
-
-[dependencies]
-rustc-serialize = "0.3.16"
-postgres-binary-copy = "0.1.1"
-flate2 = "0.2.9"
-regex = "*"
-time = "*"
-threadpool = "0.2"
-num_cpus = "0.2"
-getopts = "0.2"
-yaml-rust = "0.3.0"
-
-[dependencies.postgres]
-version = "0.10"
-features = ["rustc-serialize"]
-
-[lib]
-name = "sd_import"
-path = "src/sd_import/sd_import.rs"
-
-[[bin]]
-name = "indigo_uploader"
\ No newline at end of file
diff --git a/utils/indigo-service-uploader/rust/Dockerfile b/utils/indigo-service-uploader/rust/Dockerfile
deleted file mode 100644
index fee501524e..0000000000
--- a/utils/indigo-service-uploader/rust/Dockerfile
+++ /dev/null
@@ -1,36 +0,0 @@
-FROM debian:jessie
-
-ENV DEBIAN_FRONTEND noninteractive
-
-RUN apt-get update -qq
-
-RUN apt-get install -y --no-install-recommends \
- build-essential \
- curl \
- ca-certificates \
- libssl-dev \
- git
-
-WORKDIR /opt
-
-ENV RUST_VERSION=rust-1.7.0-x86_64-unknown-linux-gnu
-
-RUN curl -sO http://static.rust-lang.org/dist/$RUST_VERSION.tar.gz && \
-tar -xzf $RUST_VERSION.tar.gz && \
-./$RUST_VERSION/install.sh --without=rust-docs
-
-
-RUN DEBIAN_FRONTEND=noninteractive apt-get autoremove -y && \
- rm -rf \
- $RUST_VERSION \
- $RUST_VERSION.tar.gz \
- /var/lib/apt/lists/* \
- /tmp/* \
- /var/tmp/* && \
- mkdir /code
-
-COPY . /code
-
-WORKDIR /code
-#RUN cargo build --release
-
diff --git a/utils/indigo-service-uploader/rust/config.yml b/utils/indigo-service-uploader/rust/config.yml
deleted file mode 100644
index c11a92532b..0000000000
--- a/utils/indigo-service-uploader/rust/config.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-database:
- url: localhost
- db: test
- user: tarquin
- pass: postgres
- schema: public
-general:
- buffer_size: 10000
- structure_column: m
- properties_column: p
\ No newline at end of file
diff --git a/utils/indigo-service-uploader/rust/debug.docker-compose.yml b/utils/indigo-service-uploader/rust/debug.docker-compose.yml
deleted file mode 100644
index ecc875c954..0000000000
--- a/utils/indigo-service-uploader/rust/debug.docker-compose.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-debug:
- build: ./
- dockerfile: ./stable.Dockerfile
- volumes:
- - ./:/code
- command:
- bash -c "useradd -m -s /bin/bash $USER || true && /bin/su -s /bin/bash -c 'cargo build' $USER"
diff --git a/utils/indigo-service-uploader/rust/docker-compose.yml b/utils/indigo-service-uploader/rust/docker-compose.yml
deleted file mode 100644
index b884fa1d36..0000000000
--- a/utils/indigo-service-uploader/rust/docker-compose.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-release:
- build: ./
- dockerfile: ./Dockerfile
- volumes:
- - ./:/code
- command:
- bash -c "useradd -m -s /bin/bash $USER || true && /bin/su -s /bin/bash -c 'cargo build --release' $USER"
diff --git a/utils/indigo-service-uploader/rust/run_dc.sh b/utils/indigo-service-uploader/rust/run_dc.sh
deleted file mode 100644
index c5f7cc865d..0000000000
--- a/utils/indigo-service-uploader/rust/run_dc.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash
-#
-# Run docker-compose in a container
-#
-# This script will attempt to mirror the host paths by using volumes for the
-# following paths:
-# * $(pwd)
-# * $(dirname $COMPOSE_FILE) if it's set
-# * $HOME if it's set
-#
-# You can add additional volumes (or any docker run options) using
-# the $COMPOSE_OPTIONS environment variable.
-#
-
-
-set -e
-
-VERSION="1.5.2"
-IMAGE="docker/compose:$VERSION"
-
-
-# Setup options for connecting to docker host
-if [ -z "$DOCKER_HOST" ]; then
- DOCKER_HOST="/var/run/docker.sock"
-fi
-if [ -S "$DOCKER_HOST" ]; then
- DOCKER_ADDR="-v $DOCKER_HOST:$DOCKER_HOST -e DOCKER_HOST"
-else
- DOCKER_ADDR="-e DOCKER_HOST -e DOCKER_TLS_VERIFY -e DOCKER_CERT_PATH"
-fi
-
-
-# Setup volume mounts for compose config and context
-VOLUMES="-v $(pwd):$(pwd)"
-if [ -n "$COMPOSE_FILE" ]; then
- compose_dir=$(dirname $COMPOSE_FILE)
-fi
-# TODO: also check --file argument
-if [ -n "$compose_dir" ]; then
- VOLUMES="$VOLUMES -v $compose_dir:$compose_dir"
-fi
-if [ -n "$HOME" ]; then
- VOLUMES="$VOLUMES -v $HOME:$HOME"
-fi
-
-
-exec docker run --rm -ti $DOCKER_ADDR $COMPOSE_OPTIONS $VOLUMES -w $(pwd) $IMAGE $@
diff --git a/utils/indigo-service-uploader/rust/src/main.rs b/utils/indigo-service-uploader/rust/src/main.rs
deleted file mode 100644
index 20adc3d4e7..0000000000
--- a/utils/indigo-service-uploader/rust/src/main.rs
+++ /dev/null
@@ -1,50 +0,0 @@
-extern crate getopts;
-extern crate sd_import;
-
-use sd_import::SdImport;
-
-use getopts::Options;
-use std::env;
-
-fn import(file_name: &str, table_name: &str, config_name: Option<String>) {
- let conf_name = config_name.unwrap_or("config.yml".to_string());
- let mut sd_import = SdImport::new(&conf_name);
- sd_import.insert(file_name, table_name);
-}
-
-fn print_usage(program: &str, opts: Options) {
- let brief = format!("Usage: {} [options] <INPUT_FILE> <TABLE_NAME>", program);
- print!("{}", opts.usage(&brief));
-}
-
-fn main() {
- let args: Vec<String> = env::args().collect();
- let program = args[0].clone();
-
- let mut opts = Options::new();
- opts.optopt("c",
- "config-file",
- "path to config file (optional, default is 'config.yml')",
- "CONFIG");
- opts.optflag("h", "help", "print this help menu");
-
- let matches = match opts.parse(&args[1..]) {
- Ok(m) => m,
- Err(f) => panic!(f.to_string()),
- };
- if matches.opt_present("h") {
- print_usage(&program, opts);
- return;
- }
-
- let (input, table_name) = if matches.free.len() > 1 {
- (matches.free[0].clone(), matches.free[1].clone())
- } else {
- print_usage(&program, opts);
- return;
- };
- //let t_name = matches.opt_str("t");
- //let table_name = t_name.expect("No table name is specified");
- let config_name = matches.opt_str("c");
- import(&input, &table_name, config_name);
-}
diff --git a/utils/indigo-service-uploader/rust/src/sd_import/sd_batch_uploader.rs b/utils/indigo-service-uploader/rust/src/sd_import/sd_batch_uploader.rs
deleted file mode 100644
index 15ac3e7508..0000000000
--- a/utils/indigo-service-uploader/rust/src/sd_import/sd_batch_uploader.rs
+++ /dev/null
@@ -1,64 +0,0 @@
-use sd_parser::SdItem;
-use postgres::Connection;
-use postgres::types::{Type, ToSql};
-
-use postgres_binary_copy::BinaryCopyReader;
-
-static MAX_CAPACITY: u64 = 1 << 23; // 8 MB
-
-/// Postgres Batch Uploader
-/// Uses a cache and binary copy
-pub struct SdBatchUploader<'a> {
- conn: &'a Connection,
- buf: Vec<Box<ToSql>>,
- copy_stmt: String,
- buf_size: u64,
-}
-
-impl<'a> SdBatchUploader<'a> {
- pub fn new(pg_conn: &'a Connection, table_name: &str) -> Result<SdBatchUploader<'a>, String> {
- Ok(SdBatchUploader {
- conn: pg_conn,
- buf: Vec::new(),
- copy_stmt: format!("COPY {} (m, p) FROM STDIN BINARY", table_name),
- buf_size: 0,
- })
- }
-
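- /// Queues one record for upload; flushes via binary COPY once the buffered molfile bytes exceed MAX_CAPACITY.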
- pub fn upload(&mut self, sd_item: SdItem) {
- self.buf_size += sd_item.mol.as_ref().len() as u64;
- // for (key, value) in sd_item.props.iter() {
- // self.buf_size += key.len() as u64;
- // self.buf_size += value.len() as u64;
- // }
- self.buf.push(sd_item.mol);
- self.buf.push(sd_item.props);
- if self.buf_size > MAX_CAPACITY {
- self.flush();
- }
- }
-
- fn flush(&mut self) {
- if self.buf.len() == 0 {
- return;
- }
- {
- let types = &[Type::Bytea, Type::Jsonb];
- let data = self.buf.iter().map(|v| &**v);
- let mut reader = BinaryCopyReader::new(types, data);
-
- let stmt = self.conn.prepare(&self.copy_stmt).unwrap();
- stmt.copy_in(&[], &mut reader).unwrap();
- }
-
- self.buf.clear();
- self.buf_size = 0;
- }
-}
-
-
-impl<'a> Drop for SdBatchUploader<'a> {
- fn drop(&mut self) {
- self.flush();
- }
-}
diff --git a/utils/indigo-service-uploader/rust/src/sd_import/sd_import.rs b/utils/indigo-service-uploader/rust/src/sd_import/sd_import.rs
deleted file mode 100644
index 614757528d..0000000000
--- a/utils/indigo-service-uploader/rust/src/sd_import/sd_import.rs
+++ /dev/null
@@ -1,173 +0,0 @@
-extern crate postgres;
-extern crate time;
-extern crate num_cpus;
-extern crate threadpool;
-extern crate flate2;
-extern crate yaml_rust;
-extern crate postgres_binary_copy;
-extern crate rustc_serialize;
-
-mod sd_batch_uploader;
-mod sd_parser;
-
-use postgres::{Connection, SslMode, ConnectParams, ConnectTarget, UserInfo};
-use std::collections::BTreeMap;
-use time::PreciseTime;
-use std::sync::mpsc;
-use std::thread;
-use threadpool::ThreadPool;
-use std::io::prelude::*;
-use std::fs::File;
-use flate2::read::GzDecoder;
-use sd_batch_uploader::SdBatchUploader;
-use sd_parser::{SdParser, SdItem};
-use yaml_rust::{Yaml, YamlLoader};
-
-/// Main Library for loading SD files into database
-/// Reads configuration file and performs upload
-pub struct SdImport {
- pub db_config: BTreeMap<String, String>,
- pub general_config: BTreeMap<String, Yaml>,
- pub db_conn: Connection,
-}
-
-fn read_config(config_name: &str) -> (BTreeMap<String, String>, BTreeMap<String, Yaml>) {
- let mut f_config = File::open(config_name)
- .ok()
- .expect(&format!("Can not open configuration file '{}'", config_name));
- let mut s = String::new();
- f_config.read_to_string(&mut s)
- .ok()
- .expect(&format!("Error while reading configuration file '{}'", config_name));
- let conf = YamlLoader::load_from_str(&s).unwrap();
-
- let mut db_config: BTreeMap<String, String> = BTreeMap::new();
- let mut general_config: BTreeMap<String, Yaml> = BTreeMap::new();
-
- for c in conf {
- let h = c.as_hash().unwrap();
-
- let db = h.get(&Yaml::String("database".to_string())).unwrap();
- let db_conf = db.as_hash().unwrap();
-
- for (k, v) in db_conf {
- db_config.insert(k.as_str().unwrap().to_string(),
- v.as_str().unwrap().to_string());
- }
- let general = h.get(&Yaml::String("general".to_string())).expect("no general section in config");
- let general_conf = general.as_hash().unwrap();
-
- for (k, v) in general_conf {
- general_config.insert(k.as_str().unwrap().to_string(),
- v.clone());
- }
- }
- (db_config, general_config)
-}
-
-
-
-impl SdImport {
- pub fn new(config_name: &str) -> SdImport {
- let (db_conf, general_conf) = read_config(config_name);
-
- let params = ConnectParams {
- target: ConnectTarget::Tcp(db_conf.get("url").unwrap().clone()),
- port: Some(5432),
- user: Some(UserInfo {
- user: db_conf.get("user").unwrap().clone(),
- password: Some(db_conf.get("pass").unwrap().clone()),
- }),
- database: Some(db_conf.get("db").unwrap().clone()),
- options: vec![],
- };
-
- let conn = Connection::connect(params, &SslMode::None).unwrap();
- SdImport {
- db_config: db_conf,
- general_config: general_conf,
- db_conn: conn,
- }
- }
- pub fn insert(&mut self, file_name: &str, table_name: &str) {
- let t_name = format!("{}.{}", self.db_config.get("schema").unwrap(), table_name);
- self.parallel_insert(file_name, &t_name)
- }
- fn parallel_insert(&mut self, file_name: &str, table_name: &str) {
-
- // TODO: move creating table into config
- // let drop_stmt = format!("drop table if exists {}", table_name);
- // let create_stmt = format!("create table {} (id serial, m bytea, p jsonb) ", table_name);
- // conn.execute(&drop_stmt, &[]).ok().expect("Table drop failed");
- // conn.execute(&create_stmt, &[]).ok().expect("Table creation failed");
-
- println!("Start import");
-
- let start_t = PreciseTime::now();
- let mut str_count: u32 = 0;
- let buf_size: usize = self.general_config.get("buffer_size").unwrap().as_i64().unwrap() as usize;
- let (map_send, map_rec) = mpsc::sync_channel(buf_size);
- let (reduce_send, reduce_rec) = mpsc::sync_channel(buf_size);
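- // The reduce channel carries one 1u8 token per scheduled record and a trailing 0u8 as the end-of-stream marker.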
- let reduce_sender = reduce_send.clone();
- let f_name = file_name.to_string();
-
- // Create parallel reader
- let sd_reader = thread::spawn(move || {
- let pool = ThreadPool::new(num_cpus::get());
- let f = File::open(f_name).unwrap();
- let mut f_str = GzDecoder::new(f).unwrap();
- let parser = SdParser::new(&mut f_str);
-
- for sd in parser {
- reduce_sender.send(1u8).unwrap();
- let map_sender = map_send.clone();
- pool.execute(move || {
- let sd_item = SdItem::new(&sd).unwrap();
- map_sender.send(sd_item).unwrap();
- });
- }
- reduce_sender.send(0u8).unwrap();
- });
-
- // Start upload into database
-
- let sd_uploader = &mut SdBatchUploader::new(&self.db_conn, table_name).unwrap();
- loop {
- let status = reduce_rec.recv().unwrap();
- match status {
- 1u8 => {
- let sd_item: SdItem = map_rec.recv().unwrap();
- sd_uploader.upload(sd_item);
- str_count += 1;
- }
- _ => break,
- }
- }
-
- sd_reader.join().unwrap();
-
- let end_t = PreciseTime::now();
- let timer_ms = start_t.to(end_t).num_milliseconds() as f32;
- let timer_s = timer_ms / 1000f32;
-
- println!("Insert total time = {} ms ", timer_ms as i32);
- println!("Average insert time = {} structures per second", ((str_count as f32) / timer_s) as i32);
- println!("Total structures processed = {}", str_count);
-
- }
-
- // fn single_insert() {
- // {
- // let sd_uploader = &mut SdBatchUploader::new(&conn, table_name).unwrap();
- // let f = File::open("data/test-108.sd.gz").unwrap();
- // let mut f_str = GzDecoder::new(f).unwrap();
- // let parser = SdParser::new(&mut f_str);
- // for sd in parser {
- // let sd_item = SdItem::new(sd.as_ref());
- // let sd = sd_item.unwrap();
- // sd_uploader.upload(sd);
- // }
- // }
- // }
-}
diff --git a/utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs b/utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs
deleted file mode 100644
index 3239bd7ef1..0000000000
--- a/utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs
+++ /dev/null
@@ -1,249 +0,0 @@
-extern crate regex;
-
-use std::collections::BTreeMap;
-use rustc_serialize::json::{Json, ToJson};
-use std::io::BufReader;
-use std::io::Lines;
-use std::io::prelude::*;
-use flate2::Compression;
-use flate2::write::GzEncoder;
-
-
-static LINE_ENDING: char = '\n';
-
-// Structure for keeping molecule and properties
-pub struct SdItem {
- pub mol: Box<Vec<u8>>,
- pub props: Box<Json>,
-}
-/// SD files parser
-pub struct SdParser<'a> {
- sd_iter: Lines<BufReader<&'a mut Read>>,
-}
-
-impl SdItem {
- pub fn new(sd: &str) -> Result<SdItem, String> {
- let re = regex::Regex::new(">[:space:]*<").unwrap();
- let mut has_mol = false;
- let mut mol: Option<String> = None;
- let mut prop_array: Vec<BTreeMap<String, Json>> = Vec::new();
-
- // Iterate properties. First in iteration is a molecule
- for cap in re.split(sd) {
- if has_mol {
- let prop_list = cap.trim();
- let prop_len = prop_list.len();
- let end_idx = prop_list.find(LINE_ENDING).unwrap_or(prop_len);
- let mut key_idx = end_idx;
-
- if end_idx > 0 {
- key_idx -= 1;
- }
-
- let p_name = &cap[0..key_idx];
-
- let mut val_idx = end_idx;
- if val_idx + 1 < prop_len {
- val_idx += 1;
- }
- let p_val = &cap[val_idx..prop_len];
-
- let mut props: BTreeMap<String, Json> = BTreeMap::new();
-
- // Create x,y,a,b representations
- props.insert("x".to_string(), p_name.to_string().to_lowercase().to_json());
- props.insert("y".to_string(), SdItem::read_property(p_val));
- props.insert("a".to_string(), p_name.to_string().to_json());
- props.insert("b".to_string(), p_val.to_string().to_json());
-
- prop_array.push(props);
- } else {
- mol = Some(cap.to_string());
- has_mol = true;
- }
- }
- let res = try!(mol.ok_or("no molecules").and_then(|sd| {
-
- // Gzip molecular structure
- let mut e = GzEncoder::new(Vec::new(), Compression::Default);
- e.write(sd.as_bytes()).ok().expect("Error while writing a molecule");
- let gz_mol = e.finish().unwrap();
-
- Ok(SdItem {
- mol: Box::new(gz_mol),
- props: Box::new(prop_array.to_json()),
- })
- }));
- Ok(res)
- }
-
- fn read_property(p_val: &str) -> Json {
- match Json::from_str(p_val) {
- Ok(e) => e,
- Err(_) => p_val.to_string().to_json(),
- }
- }
-}
-
-impl<'a> SdParser<'a> {
- pub fn new(input: &'a mut Read) -> SdParser {
- let buf = BufReader::new(input);
- let iter = buf.lines();
- SdParser { sd_iter: iter }
- }
-}
-
-impl<'a> Iterator for SdParser<'a> {
- type Item = String;
- fn next(&mut self) -> Option<String> {
- let mut mol_str = String::new();
-
- // Iterate SD file by $$$$
- for mol in self.sd_iter
- .by_ref()
- .filter_map(|a| a.ok())
- .take_while(|a| !a.starts_with("$$$$")) {
- mol_str.push_str(mol.as_ref());
- mol_str.push(LINE_ENDING);
- }
- return match mol_str.len() {
- 0 => None,
- _ => Some(mol_str),
- };
- }
-}
-
-#[cfg(test)]
-mod tests {
- static LINE_ENDING: char = '\n';
- use std::fs::File;
- use sd_parser::{SdItem, SdParser};
- use flate2::read::GzDecoder;
- use std::collections::BTreeSet;
- #[test]
- fn test_all_properties_parsed() {
- let sd_item = SdItem::new("975001
- -OEChem-05211109542D
-
- 45 47 0 0 0 0 0 0 \
- 0999 V2000
- 25 45 1 0 0 0 0
-M END
-> <COMPOUND_CID>
-\
- 975001
-
-> <COMPOUND_CANONICALIZED>
-1
-
-> <CACTVS_COMPLEXITY>
-\
- 426");
-
- let sd = &sd_item.unwrap();
- assert!(sd.props.is_array());
- let p_array = sd.props.as_array().unwrap();
- let mut sd_properties: BTreeSet<String> = BTreeSet::new();
-
- for p in p_array {
- let a = p.find("a").unwrap();
- sd_properties.insert(a.as_string().unwrap().to_string());
- }
-
- assert!(sd_properties.contains("COMPOUND_CANONICALIZED"));
- assert!(sd_properties.contains("COMPOUND_CID"));
- assert!(sd_properties.contains("CACTVS_COMPLEXITY"));
- }
-
- #[test]
- fn test_sd_parser_basic() {
- let mut f = File::open("../data/test_pubchem_10.sdf").unwrap();
- let parser = &mut SdParser::new(&mut f);
- assert_eq!(10, parser.count());
- }
- #[test]
- fn test_sd_parse_options() {
- let mut f = File::open("../data/test_pubchem_10.sdf").unwrap();
- let parser = &mut SdParser::new(&mut f);
-
- let sd = parser.next().unwrap();
- let sd_item = SdItem::new(sd.as_ref()).unwrap();
-
- assert!(sd_item.props.is_array());
- let p_array = sd_item.props.as_array().unwrap();
- for p in p_array {
- let a = p.find("a").unwrap().as_string().unwrap();
- let b = p.find("b").unwrap().as_string().unwrap();
- assert!(!b.starts_with(LINE_ENDING));
-
- if a.contains("PUBCHEM_EXACT_MASS") {
- assert!(p.find("y").unwrap().is_number());
- }
- }
- }
-
- #[test]
- fn test_basic_sd_iterator() {
- let mut f = File::open("../data/test_pubchem_10.sdf").unwrap();
- let parser = SdParser::new(&mut f);
- let mut p_size: usize = 0;
- for sd in parser {
- let sd_item = SdItem::new(sd.as_ref());
- let sd = &sd_item.unwrap();
- assert!(sd.props.is_array());
- p_size += sd.props.as_array().unwrap().len();
- }
- assert_eq!(325, p_size);
- }
- #[test]
- fn test_sd_scope_18() {
- let f = File::open("../data/test-18.sd.gz").unwrap();
- let mut f_str = GzDecoder::new(f).unwrap();
- let parser = SdParser::new(&mut f_str);
- let mut p_size: usize = 0;
- let mut m_size: usize = 0;
- for sd in parser {
- let sd_item = SdItem::new(sd.as_ref());
- let sd = &sd_item.unwrap();
- assert!(sd.props.is_array());
- p_size += sd.props.as_array().unwrap().len();
- m_size += 1;
- }
- assert_eq!(18, m_size);
- assert_eq!(576, p_size);
- }
- #[test]
- fn test_sd_scope_108() {
- let f = File::open("../data/test-108.sd.gz").unwrap();
- let mut f_str = GzDecoder::new(f).unwrap();
- let parser = SdParser::new(&mut f_str);
- let mut p_size: usize = 0;
- let mut m_size: usize = 0;
- for sd in parser {
- let sd_item = SdItem::new(sd.as_ref());
- let sd = &sd_item.unwrap();
- assert!(sd.props.is_array());
- p_size += sd.props.as_array().unwrap().len();
- m_size += 1;
- }
- assert_eq!(108, m_size);
- assert_eq!(3456, p_size);
- }
- #[test]
- fn test_sd_scope_2759() {
- let f = File::open("../data/test-2759.sd.gz").unwrap();
- let mut f_str = GzDecoder::new(f).unwrap();
- let parser = SdParser::new(&mut f_str);
- let mut p_size: usize = 0;
- let mut m_size: usize = 0;
- for sd in parser {
- let sd_item = SdItem::new(sd.as_ref());
- let sd = &sd_item.unwrap();
- assert!(sd.props.is_array());
- p_size += sd.props.as_array().unwrap().len();
- m_size += 1;
- }
- assert_eq!(2759, m_size);
- assert_eq!(8277, p_size);
- }
-}
diff --git a/utils/indigo-service-uploader/rust/tests/config.yml b/utils/indigo-service-uploader/rust/tests/config.yml
deleted file mode 100644
index d949b9fdce..0000000000
--- a/utils/indigo-service-uploader/rust/tests/config.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-database:
- url: localhost
- db: indigoservice
- user: indigoservice
- pass: p@ssw0rd
- schema: indigoservice
-table:
- structure_column: m
- properties_column: p
\ No newline at end of file
diff --git a/utils/indigo-service-uploader/rust/tests/test_upload.rs b/utils/indigo-service-uploader/rust/tests/test_upload.rs
deleted file mode 100644
index b405d4c068..0000000000
--- a/utils/indigo-service-uploader/rust/tests/test_upload.rs
+++ /dev/null
@@ -1,100 +0,0 @@
-extern crate postgres;
-extern crate flate2;
-extern crate sd_import;
-
-
-use postgres::types::FromSql;
-use postgres::Connection;
-use postgres::Result as PgResult;
-use sd_import::SdImport;
-
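-/// Executes the query and converts the first column of the first row via FromSql.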
-fn get_single_value<T>(conn: &Connection, query: &str) -> PgResult<T>
- where T: FromSql
-{
- println!("Executing query: {}", query);
- let stmt = try!(conn.prepare(query));
- let rows = try!(stmt.query(&[]));
- let row = rows.iter().next().unwrap();
- row.get_opt(0)
-}
-
-fn drop_create_table(conn: &Connection, table_name: &str) {
- let drop_stmt = format!("drop table if exists {}", table_name);
- let create_stmt = format!("create table {} (id serial, m bytea, p jsonb) ", table_name);
-
- conn.execute(&drop_stmt, &[]).ok().expect("Table drop failed");
- conn.execute(&create_stmt, &[]).ok().expect("Table creation failed");
-}
-
-fn get_query_count(conn: &Connection, query: String) -> i64 {
- get_single_value::<i64>(&conn, &query).unwrap()
-}
-
-#[test]
-fn test_basic_upload() {
- let mut sd_import = SdImport::new("tests/config.yml");
-
- let table_name = "test_indigo_upload";
-
- // Test basic upload
- drop_create_table(&sd_import.db_conn, table_name);
-
- sd_import.insert("../data/test-108.sd.gz", table_name);
-
- {
- let t_count = get_query_count(&sd_import.db_conn,
- format!("select count (*) from {}", table_name));
- assert_eq!(108, t_count);
- }
-
- // Test pubchem insert
- drop_create_table(&sd_import.db_conn, table_name);
-
- sd_import.insert("../data/test_pubchem_10.sd.gz", table_name);
-
- {
- let t_count = get_query_count(&sd_import.db_conn,
- format!("select count(*) from {} \
- ,jsonb_array_elements(p) elems where elems->>'x' \
- like '%mass%' and (elems->>'y')::float > 300",
- table_name));
- assert_eq!(6, t_count);
- }
- // Test maybridge
- drop_create_table(&sd_import.db_conn, table_name);
-
- sd_import.insert("../data/maybridge-stardrop-sample.sd.gz", table_name);
-
- {
- let t_count = get_query_count(&sd_import.db_conn,
- format!("select count(*) from {} \
- ,jsonb_array_elements(p) elems where elems->>'x' like '%logp%' and (elems->>'y')::float > 5",
- table_name));
- assert_eq!(16, t_count);
- }
- {
- let t_count = get_query_count(&sd_import.db_conn,
- format!("select count (*) from {}", table_name));
- assert_eq!(108, t_count);
- }
-
- // Test floats
- drop_create_table(&sd_import.db_conn, table_name);
-
- sd_import.insert("../data/test-18-floats.sd.gz", table_name);
-
- {
- let t_count = get_query_count(&sd_import.db_conn,
- format!("select count(*) from {} \
- ,jsonb_array_elements(p) elems where elems->>'x' like '%logs%' and (elems->>'y')::float > 0.5",
- table_name));
- assert_eq!(36, t_count);
- }
- {
- let t_count = get_query_count(&sd_import.db_conn,
- format!("select count(*) from {} \
- ,jsonb_array_elements(p) elems where elems->>'x' like '%logs%' and (elems->>'y')::float > 1",
- table_name));
- assert_eq!(26, t_count);
- }
-}
diff --git a/utils/indigo-service/CMakeLists.txt b/utils/indigo-service/CMakeLists.txt
index f761475712..611af5dc10 100644
--- a/utils/indigo-service/CMakeLists.txt
+++ b/utils/indigo-service/CMakeLists.txt
@@ -11,9 +11,9 @@ else()
message(WARNING "Docker executable not found, skipping building indigo-service Docker image")
else ()
add_custom_target(${PROJECT_NAME}
- COMMAND ${CMAKE_COMMAND} -E copy ${NATIVE_DIST_DIRECTORY}/epam.indigo-*-manylinux1_x86_64.whl ${CMAKE_CURRENT_SOURCE_DIR}/lib/
- COMMAND docker build -f service/Dockerfile . -t epmlsop/indigo-service:latest
- WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+ COMMAND ${CMAKE_COMMAND} -E copy ${NATIVE_DIST_DIRECTORY}/epam.indigo-*-manylinux1_x86_64.whl ${CMAKE_CURRENT_SOURCE_DIR}/backend/lib/
+ COMMAND docker build -f ./Dockerfile . -t epmlsop/indigo-service:latest
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/backend
DEPENDS indigo-python
)
endif()
diff --git a/utils/indigo-service/LICENSE b/utils/indigo-service/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/utils/indigo-service/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/utils/indigo-service/backend/Dockerfile b/utils/indigo-service/backend/Dockerfile
new file mode 100644
index 0000000000..3468f24d0d
--- /dev/null
+++ b/utils/indigo-service/backend/Dockerfile
@@ -0,0 +1,59 @@
+FROM ubuntu:22.04
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt update && \
+ apt upgrade -y
+RUN apt install -y --no-install-recommends \
+ ca-certificates \
+ curl \
+ libfreetype6-dev \
+ libfontconfig1-dev \
+ python3 \
+ python3-pip \
+ python3-wheel \
+ python3-setuptools \
+ redis-server \
+ supervisor \
+ unzip \
+ uwsgi \
+ uwsgi-plugin-python3
+
+# Install python dependencies using pip
+COPY ./service/requirements.txt /opt/
+RUN pip3 install --no-cache-dir -r /opt/requirements.txt
+
+# Setup celery
+COPY ./conf/celeryd.conf /etc/default/celeryd
+RUN useradd -ms /bin/bash celery || echo "User already exists."
+RUN chmod 640 /etc/default/celeryd
+COPY ./conf/celery.auto.conf /etc/supervisor/conf.d/
+# Setup redis
+COPY ./conf/redis.auto.conf /etc/supervisor/conf.d/
+# Setup uwsgi runner
+COPY ./conf/uwsgi.ini /etc/uwsgi.ini
+COPY ./conf/uwsgi.auto.conf /etc/supervisor/conf.d/
+
+# Install Indigo
+COPY ./lib/* /opt/
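+# If a locally built Indigo wheel has been placed in ./lib, install it in place of any preinstalled version.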
+RUN if [ -f /opt/*indigo*manylinux1_x86_64*.whl ]; then python3 -m pip install --upgrade /opt/*indigo*manylinux1_x86_64*.whl; rm /opt/*.whl; fi
+
+# Install Imago
+# COPY ./lib/imago-console-*.7z /opt/
+# RUN cd /opt && \
+# mkdir /srv/imago && \
+# for imago_zip in $(cat /srv/service_version); do case "$imago_zip" in *imago*) 7z x $imago_zip; mv imago-console*/ /srv/imago/$(basename $imago_zip .7z)/ ;; esac; done
+# RUN chmod -R a+rx /srv/imago
+
+# COPY ./lib/favicon.ico /srv/api/ui/
+COPY ./service/v2/ /srv/api/v2/
+COPY ./service/*.py /srv/api/
+
+# Clean
+RUN apt purge -y unzip git python3-pip python3-wheel && \
+ apt autoremove -y && \
+ rm -rf /opt/* /var/lib/apt/lists/*
+
+EXPOSE 80
+WORKDIR /srv/api
+CMD gunicorn --bind 0.0.0.0:80 --workers=$(nproc) app:app
diff --git a/utils/indigo-service/celery/celery.auto.conf b/utils/indigo-service/backend/conf/celery.auto.conf
similarity index 100%
rename from utils/indigo-service/celery/celery.auto.conf
rename to utils/indigo-service/backend/conf/celery.auto.conf
diff --git a/utils/indigo-service/celery/celeryd.conf b/utils/indigo-service/backend/conf/celeryd.conf
similarity index 100%
rename from utils/indigo-service/celery/celeryd.conf
rename to utils/indigo-service/backend/conf/celeryd.conf
diff --git a/utils/indigo-service/celery/redis.auto.conf b/utils/indigo-service/backend/conf/redis.auto.conf
similarity index 74%
rename from utils/indigo-service/celery/redis.auto.conf
rename to utils/indigo-service/backend/conf/redis.auto.conf
index 0a0bf6b850..457f1c2538 100644
--- a/utils/indigo-service/celery/redis.auto.conf
+++ b/utils/indigo-service/backend/conf/redis.auto.conf
@@ -4,4 +4,4 @@ autostart=true
autorestart=true
user=root
stdout_logfile=/var/log/redis/stdout.log
-stderr_logfile=/var/log/redis/stderr.log
\ No newline at end of file
+stderr_logfile=/var/log/redis/stderr.log
diff --git a/utils/indigo-service/backend/conf/uwsgi.auto.conf b/utils/indigo-service/backend/conf/uwsgi.auto.conf
new file mode 100644
index 0000000000..5501a44d86
--- /dev/null
+++ b/utils/indigo-service/backend/conf/uwsgi.auto.conf
@@ -0,0 +1,11 @@
+[program:uwsgi]
+directory=/srv/api
+
+command=uwsgi --plugin=python3 /etc/uwsgi.ini
+autostart=true
+autorestart=true
+
+redirect_stderr=true
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+killasgroup=true
diff --git a/utils/indigo-service/uwsgi/uwsgi.ini b/utils/indigo-service/backend/conf/uwsgi.ini
similarity index 76%
rename from utils/indigo-service/uwsgi/uwsgi.ini
rename to utils/indigo-service/backend/conf/uwsgi.ini
index 663229144a..f8159cf90f 100644
--- a/utils/indigo-service/uwsgi/uwsgi.ini
+++ b/utils/indigo-service/backend/conf/uwsgi.ini
@@ -5,9 +5,6 @@ workers = 4
no-orphans = true
log-date = true
harakiri = 60
-# user/group identifier of uWSGI processes
-# uid = www-data
-# gid = www-data
enable-threads = true
thunder-lock = true
diff --git a/utils/indigo-service/backend/lib/README.md b/utils/indigo-service/backend/lib/README.md
new file mode 100644
index 0000000000..d5bae4d117
--- /dev/null
+++ b/utils/indigo-service/backend/lib/README.md
@@ -0,0 +1,2 @@
+Service folder for building indigo-service with a local version of Indigo.
+Put the Indigo Python wheel file here.
diff --git a/utils/indigo-service/service/app.py b/utils/indigo-service/backend/service/app.py
similarity index 69%
rename from utils/indigo-service/service/app.py
rename to utils/indigo-service/backend/service/app.py
index cf9514375c..94e9a4014b 100644
--- a/utils/indigo-service/service/app.py
+++ b/utils/indigo-service/backend/service/app.py
@@ -8,32 +8,20 @@
from flask import Flask
from werkzeug.serving import run_simple
-# from v2.db.database import db_session
-# from v2.imago_api import imago_api
from v2.common_api import common_api
-
-# from v2.libraries_api import libraries_api
+from v2.db.database import db_session
+from v2.imago_api import imago_api
from v2.indigo_api import indigo_api
-
-# def is_indigo_db():
-# try:
-# import socket
-# socket.gethostbyname('indigo_db')
-# return True
-# except:
-# return False
-
+from v2.libraries_api import libraries_api
app = Flask(__name__)
app.config.from_pyfile("config.py")
-# if is_indigo_db():
-# app.register_blueprint(libraries_api, url_prefix='/v2/libraries')
+app.register_blueprint(libraries_api, url_prefix="/v2/libraries")
app.register_blueprint(indigo_api, url_prefix="/v2/indigo")
-# app.register_blueprint(imago_api, url_prefix='/v2/imago')
+app.register_blueprint(imago_api, url_prefix="/v2/imago")
app.register_blueprint(common_api, url_prefix="/v2")
swagger = Swagger(app)
-# logging.basicConfig(, level=logging.INFO)
logging.basicConfig(
stream=sys.stdout,
format=u"[%(asctime)s: %(levelname)-8s/%(filename)s:%(lineno)d] %(message)s",
@@ -52,9 +40,9 @@ def run_server(port):
)
-# @app.teardown_appcontext
-# def shutdown_session(exception=None):
-# db_session.remove()
+@app.teardown_appcontext
+def shutdown_session(exception=None):
+ db_session.remove()
if __name__ == "__main__":
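The restored @app.teardown_appcontext hook follows the usual Flask plus SQLAlchemy pattern: the scoped session is removed when each application context ends, so connections go back to the pool instead of leaking. A minimal self-contained sketch of the same pattern, assuming db_session is a SQLAlchemy scoped_session as is conventional for a v2/db/database.py module (the connection URL below is an illustrative assumption):

    # Sketch only: db_session is assumed to be a scoped_session; the DSN is
    # a placeholder, not the service's real connection string.
    from flask import Flask
    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    engine = create_engine("postgresql://postgres:example@db:5432/postgres")
    db_session = scoped_session(sessionmaker(bind=engine))

    app = Flask(__name__)

    @app.teardown_appcontext
    def shutdown_session(exception=None):
        db_session.remove()  # return the connection to the pool per request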
diff --git a/utils/indigo-service/service/config.py b/utils/indigo-service/backend/service/config.py
similarity index 63%
rename from utils/indigo-service/service/config.py
rename to utils/indigo-service/backend/service/config.py
index 29559b977b..9591db0eaa 100644
--- a/utils/indigo-service/service/config.py
+++ b/utils/indigo-service/backend/service/config.py
@@ -1,11 +1,12 @@
# Bingo config
+import os
BINGO_POSTGRES = {
- "host": "indigo_db",
+ "host": "db",
"port": "5432",
- "database": "indigoservice",
- "user": "indigoservice",
- "password": "p@ssw0rd",
+ "database": "postgres",
+ "user": "postgres",
+ "password": os.environ["POSTGRES_PASSWORD"],
}
# Flask config
@@ -15,16 +16,15 @@
ALLOWED_EXTENSIONS = ("sdf", "sd", "gz")
# Celery config
-
-CELERY_BROKER_URL = "redis://localhost:6379/0"
-CELERY_RESULT_BACKEND = "redis://localhost:6379/0"
-# CELERY_IMPORTS = ('v2.imago_api', 'v2.libraries_api')
-CELERY_ACCEPT_CONTENT = ("json",)
-CELERY_TASK_SERIALIZER = "json"
-CELERY_RESULT_SERIALIZER = "json"
-CELERY_ENABLE_UTC = True
-CELERY_TIMEZONE = "Etc/UTC"
-CELERYD_POOL = "prefork"
+broker_url = "redis://localhost:6379/0"
+result_backend = "redis://localhost:6379/0"
+imports = ("v2.imago_api", "v2.libraries_api")
+accept_content = ("json",)
+task_serializer = "json"
+result_serializer = "json"
+enable_utc = True
+timezone = "Etc/UTC"
+worker_pool = "prefork"
# Logging option
LOG_LEVEL = "INFO"
@@ -48,7 +48,6 @@
# imago config
-
ALLOWED_TYPES = (
"image/png",
"image/jpeg",
diff --git a/utils/indigo-service/service/pyproject.toml b/utils/indigo-service/backend/service/pyproject.toml
similarity index 100%
rename from utils/indigo-service/service/pyproject.toml
rename to utils/indigo-service/backend/service/pyproject.toml
diff --git a/utils/indigo-service/service/test-requirements.txt b/utils/indigo-service/backend/service/requirements.dev.txt
similarity index 100%
rename from utils/indigo-service/service/test-requirements.txt
rename to utils/indigo-service/backend/service/requirements.dev.txt
diff --git a/utils/indigo-service/backend/service/requirements.txt b/utils/indigo-service/backend/service/requirements.txt
new file mode 100644
index 0000000000..2ddc3221d1
--- /dev/null
+++ b/utils/indigo-service/backend/service/requirements.txt
@@ -0,0 +1,12 @@
+celery
+epam.indigo
+flasgger
+Flask
+Flask-HTTPAuth
+flask-restful
+gunicorn
+marshmallow
+pyparsing
+psycopg2-binary
+redis
+SQLAlchemy
diff --git a/utils/indigo-service/service/test/Dockerfile b/utils/indigo-service/backend/service/tests/Dockerfile
similarity index 100%
rename from utils/indigo-service/service/test/Dockerfile
rename to utils/indigo-service/backend/service/tests/Dockerfile
diff --git a/utils/indigo-service/service/test/api/__init__.py b/utils/indigo-service/backend/service/tests/api/__init__.py
similarity index 100%
rename from utils/indigo-service/service/test/api/__init__.py
rename to utils/indigo-service/backend/service/tests/api/__init__.py
diff --git a/utils/indigo-service/service/test/api/common_test.py b/utils/indigo-service/backend/service/tests/api/common_test.py
similarity index 100%
rename from utils/indigo-service/service/test/api/common_test.py
rename to utils/indigo-service/backend/service/tests/api/common_test.py
diff --git a/utils/indigo-service/service/test/api/imago_test.py b/utils/indigo-service/backend/service/tests/api/imago_test.py
similarity index 100%
rename from utils/indigo-service/service/test/api/imago_test.py
rename to utils/indigo-service/backend/service/tests/api/imago_test.py
diff --git a/utils/indigo-service/service/test/api/indigo_test.py b/utils/indigo-service/backend/service/tests/api/indigo_test.py
similarity index 100%
rename from utils/indigo-service/service/test/api/indigo_test.py
rename to utils/indigo-service/backend/service/tests/api/indigo_test.py
diff --git a/utils/indigo-service/service/test/api/library_test.py b/utils/indigo-service/backend/service/tests/api/library_test.py
similarity index 100%
rename from utils/indigo-service/service/test/api/library_test.py
rename to utils/indigo-service/backend/service/tests/api/library_test.py
diff --git a/utils/indigo-service/service/test/data/issue_75.sdf b/utils/indigo-service/backend/service/tests/data/issue_75.sdf
similarity index 100%
rename from utils/indigo-service/service/test/data/issue_75.sdf
rename to utils/indigo-service/backend/service/tests/data/issue_75.sdf
diff --git a/utils/indigo-service/service/test/data/maybridge-stardrop-sample.sdf b/utils/indigo-service/backend/service/tests/data/maybridge-stardrop-sample.sdf
similarity index 100%
rename from utils/indigo-service/service/test/data/maybridge-stardrop-sample.sdf
rename to utils/indigo-service/backend/service/tests/data/maybridge-stardrop-sample.sdf
diff --git a/utils/indigo-service/service/test/data/test-108.sd.gz b/utils/indigo-service/backend/service/tests/data/test-108.sd.gz
similarity index 100%
rename from utils/indigo-service/service/test/data/test-108.sd.gz
rename to utils/indigo-service/backend/service/tests/data/test-108.sd.gz
diff --git a/utils/indigo-service/service/test/data/test-18.sd.gz b/utils/indigo-service/backend/service/tests/data/test-18.sd.gz
similarity index 100%
rename from utils/indigo-service/service/test/data/test-18.sd.gz
rename to utils/indigo-service/backend/service/tests/data/test-18.sd.gz
diff --git a/utils/indigo-service/service/test/data/test_config_imago.inc b/utils/indigo-service/backend/service/tests/data/test_config_imago.inc
similarity index 97%
rename from utils/indigo-service/service/test/data/test_config_imago.inc
rename to utils/indigo-service/backend/service/tests/data/test_config_imago.inc
index 855515d294..54469311fe 100644
--- a/utils/indigo-service/service/test/data/test_config_imago.inc
+++ b/utils/indigo-service/backend/service/tests/data/test_config_imago.inc
@@ -1,208 +1,208 @@
-_configVersion = 101;
-characters.DistanceAbsolutelySure = 1.582848;
-characters.DistanceScaleFactor = 86.671942;
-characters.HeightMaxBound = 1.969968;
-characters.HeightMinBound = 0.306988;
-characters.InternalBinarizationThreshold = 183;
-characters.MinEndpointsPossible = 0;
-characters.MinimalRecognizableHeight = 7;
-characters.PossibleCharacterDistanceStrong = 2.824592;
-characters.PossibleCharacterDistanceWeak = 2.871944;
-characters.PossibleCharacterMinimalQuality = 0.077092;
-characters.RatioDiffThresh = 0.662342;
-characters.ReestimateMinimalCharacters = 4;
-csr.DeleteBadTriangles = 1.909633;
-csr.Dissolve = 0.475037;
-csr.LineVectorizationFactor = 1.517119;
-csr.ReconnectMinBads = 3;
-csr.ReconnectProbablyGoodCoef = 0.413864;
-csr.ReconnectSurelyBadCoef = 1.974586;
-csr.ReconnectSurelyGoodCoef = 1.022164;
-csr.RescaleImageDimensions = 1280;
-csr.SmallImageDim = 251;
-csr.StableDecorner = 0;
-csr.UseDPApproximator = 0;
-csr.WeakSegmentatorDist = 1;
-estimation.CapitalHeightError = 0.710637;
-estimation.CharactersSpaceCoeff = 0.323553;
-estimation.DoubleBondDist = 29;
-estimation.MaxSymRatio = 1.107658;
-estimation.MaxSymbolHeightPercentsOfImage = 0.147136;
-estimation.MinSymRatio = 0.260041;
-estimation.ParLinesEps = 0.507035;
-estimation.SegmentVerEps = 8;
-estimation.SymHeightErr = 30.256023;
-general.ClusterIndex = 3;
-general.ImageAlreadyBinarized = 1;
-graph.MinimalDistTresh = 2.455535;
-graph.RatioSub = 0.876111;
-graph.RatioTresh = 0.540946;
-lab_remover.CenterShiftMax = 6.271216;
-lab_remover.HeightFactor = 1.167291;
-lab_remover.MaxCapitalHeight = 36;
-lab_remover.MaxLabelLines = 5;
-lab_remover.MinCapitalHeight = 8;
-lab_remover.MinLabelChars = 1;
-lab_remover.MinimalDensity = 0.105134;
-lab_remover.PixGapX = 2;
-lab_remover.PixGapY = 10;
-labels.adjustAttemptsCount = 1;
-labels.adjustDec = 1.376909;
-labels.adjustInc = 0.815811;
-labels.capitalAdjustFactor = 0.488046;
-labels.ratioCapital = 0.898600;
-labels.ratioWeight = 0.250038;
-labels.underlinePos = 0.413279;
-labels.weightUnderline = 0.255530;
-lcomb.MaximalDistanceFactor = 0.547562;
-lcomb.MaximalYDistanceFactor = 0.195516;
-main.DissolvingsFactor = 12;
-main.MinGoodCharactersSize = 11;
-main.WarningsForTooSmallCharacters = 2;
-main.WarningsRecalcTreshold = 0;
-mbond.Case1Factor = 0.342732;
-mbond.Case1LengthTresh = 104.665669;
-mbond.Case2Factor = 0.702378;
-mbond.Case2LengthTresh = 72.378313;
-mbond.Case3Factor = 1.207533;
-mbond.DefaultErr = 0.574503;
-mbond.DoubleCoef = 0.073200;
-mbond.DoubleLeftLengthTresh = 0.379831;
-mbond.DoubleMagic1 = 0.790503;
-mbond.DoubleMagic2 = 0.786827;
-mbond.DoubleRatioTresh = 8.575122;
-mbond.DoubleRightLengthTresh = 0.339834;
-mbond.DoubleTreshMax = 0.722373;
-mbond.DoubleTreshMin = 0.107749;
-mbond.LongBond = 98.216747;
-mbond.LongErr = 0.294706;
-mbond.MaxLen1 = 131.757683;
-mbond.MaxLen2 = 146.842210;
-mbond.MaxLen3 = 132.511245;
-mbond.MaxLen4 = 139.288255;
-mbond.MaxLen5 = 74.206661;
-mbond.MediumBond = 89.440360;
-mbond.MediumErr = 0.434682;
-mbond.MinLen1 = 78.625771;
-mbond.MinLen2 = 74.633694;
-mbond.ParBondsEps = 0.396634;
-mbond.TripleLeftLengthTresh = 0.261395;
-mbond.TripleRightLengthTresh = 0.234413;
-mbond.mbe1 = 0.076672;
-mbond.mbe2 = 0.181879;
-mbond.mbe3 = 0.160493;
-mbond.mbe4 = 0.136906;
-mbond.mbe5 = 0.154427;
-mbond.mbe6 = 0.175280;
-mbond.mbe7 = 0.400131;
-mbond.mbe_def = 0.631757;
-molecule.AngleTreshold = 0.257278;
-molecule.LengthFactor_default = 0.498034;
-molecule.LengthFactor_long = 0.331387;
-molecule.LengthFactor_medium = 0.385115;
-molecule.LengthValue_long = 108.533391;
-molecule.LengthValue_medium = 91.201845;
-molecule.SpaceMultiply = 1.150569;
-p_estimator.ApriorProb4SymbolCase = 0.692460;
-p_estimator.DefaultApriority = 0.502905;
-p_estimator.LogisticLocation = 0.961426;
-p_estimator.LogisticScale = 0.989250;
-p_estimator.MinRatio2ConsiderGrPr = 0.259882;
-p_estimator.UsePerimeterNormalization = 0;
-prefilterCV.BinarizerFrameGap = 1;
-prefilterCV.BinarizerThreshold = 169;
-prefilterCV.BorderPartProportion = 41;
-prefilterCV.MaxBadToGoodRatio = 2;
-prefilterCV.MaxNonBWPixelsProportion = 15;
-prefilterCV.MaxRectangleCropLineWidth = 21;
-prefilterCV.MaxRectangleCropLineWidthAlreadyBinarized = 0;
-prefilterCV.MinGoodPixelsCount = 17;
-prefilterCV.OtsuThresholdValue = 130;
-prefilterCV.StrongBinarizeSize = 4;
-prefilterCV.StrongBinarizeTresh = 1.313146;
-prefilterCV.UseOtsuPixelsAddition = 0;
-prefilterCV.WeakBinarizeSize = 5;
-prefilterCV.WeakBinarizeTresh = 0.943737;
-retinex.ContrastDropPercentage = 0.038246;
-retinex.ContrastNominal = 140;
-retinex.EndIteration = 13;
-retinex.IterationStep = 5;
-retinex.StartIteration = 3;
-routines.Algebra_IntersectionEps = 0.013238;
-routines.Algebra_SameLineEps = 0.214315;
-routines.Circle_AsCharFactor = 1.086772;
-routines.Circle_GapAngleMax = 0.219033;
-routines.Circle_GapRadiusMax = 1.306270;
-routines.Circle_MaxDeviation = 0.010233;
-routines.Circle_MinRadius = 6.227803;
-routines.LineThick_Grid = 10;
-separator.UseVoteArray = 0;
-separator.capHeightMax = 1.626769;
-separator.capHeightMin = 0.530470;
-separator.capHeightRatio = 0.345944;
-separator.capHeightRatio2 = 2.394881;
-separator.ext2charRatio = 1.680010;
-separator.extCapHeightMax = 2.199233;
-separator.extCapHeightMin = 0.269385;
-separator.extRatioMax = 1.549786;
-separator.extRatioMin = 0.296248;
-separator.gdConst = 1.454894;
-separator.getRatio1 = 0.926927;
-separator.getRatio2 = 1.213728;
-separator.hu_0_1 = 0.169181;
-separator.hu_0_2 = 0.219739;
-separator.hu_1_1 = 0.213581;
-separator.hu_1_2 = 0.090455;
-separator.hu_1_3 = 0.100023;
-separator.ltFactor1 = 0;
-separator.maxDensity = 0.899635;
-separator.minApproxSegsStrong = 1;
-separator.minApproxSegsWeak = 7;
-separator.minDensity = 0.191093;
-separator.specialSegmentsTreat = 10;
-separator.testSlashLine1 = 2.831270;
-separator.testSlashLine2 = 2.436796;
-skeleton.BaseMult = 0.120864;
-skeleton.BaseSmallErr = 0.117512;
-skeleton.BrokenRepairAngleEps = 0.186189;
-skeleton.BrokenRepairCoef1 = 1.274116;
-skeleton.BrokenRepairCoef2 = 3.242707;
-skeleton.BrokenRepairFactor = 3.286455;
-skeleton.ConnectBlockS = 10.799448;
-skeleton.ConnectFactor = 2.412895;
-skeleton.Dissolve2Const = 0.227941;
-skeleton.DissolveConst = 0.108697;
-skeleton.DissolveMinErr = 0.177484;
-skeleton.DistTreshLimFactor = 1.974879;
-skeleton.Join2Const = 0.240045;
-skeleton.Join3Const = 0.147932;
-skeleton.JoinVerticiesConst = 0.098870;
-skeleton.LongBondLen = 99.112591;
-skeleton.LongMul = 0.138084;
-skeleton.LongSmallErr = 0.080023;
-skeleton.MediumBondLen = 108.668327;
-skeleton.MediumMul = 0.179226;
-skeleton.MediumSmallErr = 0.107658;
-skeleton.ShortBondLen = 26.963686;
-skeleton.ShortMul = 0.295381;
-skeleton.ShrinkEps = 0.151628;
-skeleton.SlopeFact1 = 0.088580;
-skeleton.SlopeFact2 = 0.190300;
-utils.SlashLineDensity = 0.439441;
-wbe.MinimalSingleDownSegsCount = 4;
-wbe.PointsCompareDist = 1;
-wbe.SingleDownAngleMax = 50.229562;
-wbe.SingleDownCompareDist = 1;
-wbe.SingleDownDistancesMax = 10.900421;
-wbe.SingleDownEps = 2.398372;
-wbe.SingleDownLengthMax = 50.722094;
-wbe.SingleUpDefCoeff = 0.284690;
-wbe.SingleUpIncCoeff = 0.378218;
-wbe.SingleUpIncLengthTresh = 37.168361;
-wbe.SingleUpInterpolateEps = 0.209761;
-wbe.SingleUpMagicAddition = 0.206429;
-wbe.SingleUpSlopeThresh = 0.116888;
-wbe.SingleUpThickThresh = 1.614505;
-wbe.SomeTresh = 0.126759;
-weak_seg.RectangularCropAreaTreshold = 0.339871;
+_configVersion = 101;
+characters.DistanceAbsolutelySure = 1.582848;
+characters.DistanceScaleFactor = 86.671942;
+characters.HeightMaxBound = 1.969968;
+characters.HeightMinBound = 0.306988;
+characters.InternalBinarizationThreshold = 183;
+characters.MinEndpointsPossible = 0;
+characters.MinimalRecognizableHeight = 7;
+characters.PossibleCharacterDistanceStrong = 2.824592;
+characters.PossibleCharacterDistanceWeak = 2.871944;
+characters.PossibleCharacterMinimalQuality = 0.077092;
+characters.RatioDiffThresh = 0.662342;
+characters.ReestimateMinimalCharacters = 4;
+csr.DeleteBadTriangles = 1.909633;
+csr.Dissolve = 0.475037;
+csr.LineVectorizationFactor = 1.517119;
+csr.ReconnectMinBads = 3;
+csr.ReconnectProbablyGoodCoef = 0.413864;
+csr.ReconnectSurelyBadCoef = 1.974586;
+csr.ReconnectSurelyGoodCoef = 1.022164;
+csr.RescaleImageDimensions = 1280;
+csr.SmallImageDim = 251;
+csr.StableDecorner = 0;
+csr.UseDPApproximator = 0;
+csr.WeakSegmentatorDist = 1;
+estimation.CapitalHeightError = 0.710637;
+estimation.CharactersSpaceCoeff = 0.323553;
+estimation.DoubleBondDist = 29;
+estimation.MaxSymRatio = 1.107658;
+estimation.MaxSymbolHeightPercentsOfImage = 0.147136;
+estimation.MinSymRatio = 0.260041;
+estimation.ParLinesEps = 0.507035;
+estimation.SegmentVerEps = 8;
+estimation.SymHeightErr = 30.256023;
+general.ClusterIndex = 3;
+general.ImageAlreadyBinarized = 1;
+graph.MinimalDistTresh = 2.455535;
+graph.RatioSub = 0.876111;
+graph.RatioTresh = 0.540946;
+lab_remover.CenterShiftMax = 6.271216;
+lab_remover.HeightFactor = 1.167291;
+lab_remover.MaxCapitalHeight = 36;
+lab_remover.MaxLabelLines = 5;
+lab_remover.MinCapitalHeight = 8;
+lab_remover.MinLabelChars = 1;
+lab_remover.MinimalDensity = 0.105134;
+lab_remover.PixGapX = 2;
+lab_remover.PixGapY = 10;
+labels.adjustAttemptsCount = 1;
+labels.adjustDec = 1.376909;
+labels.adjustInc = 0.815811;
+labels.capitalAdjustFactor = 0.488046;
+labels.ratioCapital = 0.898600;
+labels.ratioWeight = 0.250038;
+labels.underlinePos = 0.413279;
+labels.weightUnderline = 0.255530;
+lcomb.MaximalDistanceFactor = 0.547562;
+lcomb.MaximalYDistanceFactor = 0.195516;
+main.DissolvingsFactor = 12;
+main.MinGoodCharactersSize = 11;
+main.WarningsForTooSmallCharacters = 2;
+main.WarningsRecalcTreshold = 0;
+mbond.Case1Factor = 0.342732;
+mbond.Case1LengthTresh = 104.665669;
+mbond.Case2Factor = 0.702378;
+mbond.Case2LengthTresh = 72.378313;
+mbond.Case3Factor = 1.207533;
+mbond.DefaultErr = 0.574503;
+mbond.DoubleCoef = 0.073200;
+mbond.DoubleLeftLengthTresh = 0.379831;
+mbond.DoubleMagic1 = 0.790503;
+mbond.DoubleMagic2 = 0.786827;
+mbond.DoubleRatioTresh = 8.575122;
+mbond.DoubleRightLengthTresh = 0.339834;
+mbond.DoubleTreshMax = 0.722373;
+mbond.DoubleTreshMin = 0.107749;
+mbond.LongBond = 98.216747;
+mbond.LongErr = 0.294706;
+mbond.MaxLen1 = 131.757683;
+mbond.MaxLen2 = 146.842210;
+mbond.MaxLen3 = 132.511245;
+mbond.MaxLen4 = 139.288255;
+mbond.MaxLen5 = 74.206661;
+mbond.MediumBond = 89.440360;
+mbond.MediumErr = 0.434682;
+mbond.MinLen1 = 78.625771;
+mbond.MinLen2 = 74.633694;
+mbond.ParBondsEps = 0.396634;
+mbond.TripleLeftLengthTresh = 0.261395;
+mbond.TripleRightLengthTresh = 0.234413;
+mbond.mbe1 = 0.076672;
+mbond.mbe2 = 0.181879;
+mbond.mbe3 = 0.160493;
+mbond.mbe4 = 0.136906;
+mbond.mbe5 = 0.154427;
+mbond.mbe6 = 0.175280;
+mbond.mbe7 = 0.400131;
+mbond.mbe_def = 0.631757;
+molecule.AngleTreshold = 0.257278;
+molecule.LengthFactor_default = 0.498034;
+molecule.LengthFactor_long = 0.331387;
+molecule.LengthFactor_medium = 0.385115;
+molecule.LengthValue_long = 108.533391;
+molecule.LengthValue_medium = 91.201845;
+molecule.SpaceMultiply = 1.150569;
+p_estimator.ApriorProb4SymbolCase = 0.692460;
+p_estimator.DefaultApriority = 0.502905;
+p_estimator.LogisticLocation = 0.961426;
+p_estimator.LogisticScale = 0.989250;
+p_estimator.MinRatio2ConsiderGrPr = 0.259882;
+p_estimator.UsePerimeterNormalization = 0;
+prefilterCV.BinarizerFrameGap = 1;
+prefilterCV.BinarizerThreshold = 169;
+prefilterCV.BorderPartProportion = 41;
+prefilterCV.MaxBadToGoodRatio = 2;
+prefilterCV.MaxNonBWPixelsProportion = 15;
+prefilterCV.MaxRectangleCropLineWidth = 21;
+prefilterCV.MaxRectangleCropLineWidthAlreadyBinarized = 0;
+prefilterCV.MinGoodPixelsCount = 17;
+prefilterCV.OtsuThresholdValue = 130;
+prefilterCV.StrongBinarizeSize = 4;
+prefilterCV.StrongBinarizeTresh = 1.313146;
+prefilterCV.UseOtsuPixelsAddition = 0;
+prefilterCV.WeakBinarizeSize = 5;
+prefilterCV.WeakBinarizeTresh = 0.943737;
+retinex.ContrastDropPercentage = 0.038246;
+retinex.ContrastNominal = 140;
+retinex.EndIteration = 13;
+retinex.IterationStep = 5;
+retinex.StartIteration = 3;
+routines.Algebra_IntersectionEps = 0.013238;
+routines.Algebra_SameLineEps = 0.214315;
+routines.Circle_AsCharFactor = 1.086772;
+routines.Circle_GapAngleMax = 0.219033;
+routines.Circle_GapRadiusMax = 1.306270;
+routines.Circle_MaxDeviation = 0.010233;
+routines.Circle_MinRadius = 6.227803;
+routines.LineThick_Grid = 10;
+separator.UseVoteArray = 0;
+separator.capHeightMax = 1.626769;
+separator.capHeightMin = 0.530470;
+separator.capHeightRatio = 0.345944;
+separator.capHeightRatio2 = 2.394881;
+separator.ext2charRatio = 1.680010;
+separator.extCapHeightMax = 2.199233;
+separator.extCapHeightMin = 0.269385;
+separator.extRatioMax = 1.549786;
+separator.extRatioMin = 0.296248;
+separator.gdConst = 1.454894;
+separator.getRatio1 = 0.926927;
+separator.getRatio2 = 1.213728;
+separator.hu_0_1 = 0.169181;
+separator.hu_0_2 = 0.219739;
+separator.hu_1_1 = 0.213581;
+separator.hu_1_2 = 0.090455;
+separator.hu_1_3 = 0.100023;
+separator.ltFactor1 = 0;
+separator.maxDensity = 0.899635;
+separator.minApproxSegsStrong = 1;
+separator.minApproxSegsWeak = 7;
+separator.minDensity = 0.191093;
+separator.specialSegmentsTreat = 10;
+separator.testSlashLine1 = 2.831270;
+separator.testSlashLine2 = 2.436796;
+skeleton.BaseMult = 0.120864;
+skeleton.BaseSmallErr = 0.117512;
+skeleton.BrokenRepairAngleEps = 0.186189;
+skeleton.BrokenRepairCoef1 = 1.274116;
+skeleton.BrokenRepairCoef2 = 3.242707;
+skeleton.BrokenRepairFactor = 3.286455;
+skeleton.ConnectBlockS = 10.799448;
+skeleton.ConnectFactor = 2.412895;
+skeleton.Dissolve2Const = 0.227941;
+skeleton.DissolveConst = 0.108697;
+skeleton.DissolveMinErr = 0.177484;
+skeleton.DistTreshLimFactor = 1.974879;
+skeleton.Join2Const = 0.240045;
+skeleton.Join3Const = 0.147932;
+skeleton.JoinVerticiesConst = 0.098870;
+skeleton.LongBondLen = 99.112591;
+skeleton.LongMul = 0.138084;
+skeleton.LongSmallErr = 0.080023;
+skeleton.MediumBondLen = 108.668327;
+skeleton.MediumMul = 0.179226;
+skeleton.MediumSmallErr = 0.107658;
+skeleton.ShortBondLen = 26.963686;
+skeleton.ShortMul = 0.295381;
+skeleton.ShrinkEps = 0.151628;
+skeleton.SlopeFact1 = 0.088580;
+skeleton.SlopeFact2 = 0.190300;
+utils.SlashLineDensity = 0.439441;
+wbe.MinimalSingleDownSegsCount = 4;
+wbe.PointsCompareDist = 1;
+wbe.SingleDownAngleMax = 50.229562;
+wbe.SingleDownCompareDist = 1;
+wbe.SingleDownDistancesMax = 10.900421;
+wbe.SingleDownEps = 2.398372;
+wbe.SingleDownLengthMax = 50.722094;
+wbe.SingleUpDefCoeff = 0.284690;
+wbe.SingleUpIncCoeff = 0.378218;
+wbe.SingleUpIncLengthTresh = 37.168361;
+wbe.SingleUpInterpolateEps = 0.209761;
+wbe.SingleUpMagicAddition = 0.206429;
+wbe.SingleUpSlopeThresh = 0.116888;
+wbe.SingleUpThickThresh = 1.614505;
+wbe.SomeTresh = 0.126759;
+weak_seg.RectangularCropAreaTreshold = 0.339871;
weak_seg.RectangularCropFitTreshold = 0.899138;
\ No newline at end of file
diff --git a/utils/indigo-service/service/test/data/test_from_schrodinger.sd.gz b/utils/indigo-service/backend/service/tests/data/test_from_schrodinger.sd.gz
similarity index 100%
rename from utils/indigo-service/service/test/data/test_from_schrodinger.sd.gz
rename to utils/indigo-service/backend/service/tests/data/test_from_schrodinger.sd.gz
diff --git a/utils/indigo-service/service/test/data/test_pubchem_10.sdf b/utils/indigo-service/backend/service/tests/data/test_pubchem_10.sdf
similarity index 100%
rename from utils/indigo-service/service/test/data/test_pubchem_10.sdf
rename to utils/indigo-service/backend/service/tests/data/test_pubchem_10.sdf
diff --git a/utils/indigo-service/service/test/tests.py b/utils/indigo-service/backend/service/tests/tests.py
similarity index 100%
rename from utils/indigo-service/service/test/tests.py
rename to utils/indigo-service/backend/service/tests/tests.py
diff --git a/utils/indigo-service/service/v2/__init__.py b/utils/indigo-service/backend/service/v2/__init__.py
similarity index 100%
rename from utils/indigo-service/service/v2/__init__.py
rename to utils/indigo-service/backend/service/v2/__init__.py
diff --git a/utils/indigo-service/service/v2/bingo_ql/__init__.py b/utils/indigo-service/backend/service/v2/bingo_ql/__init__.py
similarity index 100%
rename from utils/indigo-service/service/v2/bingo_ql/__init__.py
rename to utils/indigo-service/backend/service/v2/bingo_ql/__init__.py
diff --git a/utils/indigo-service/service/v2/bingo_ql/query.py b/utils/indigo-service/backend/service/v2/bingo_ql/query.py
similarity index 100%
rename from utils/indigo-service/service/v2/bingo_ql/query.py
rename to utils/indigo-service/backend/service/v2/bingo_ql/query.py
diff --git a/utils/indigo-service/service/v2/bingo_ql/test.py b/utils/indigo-service/backend/service/v2/bingo_ql/test.py
similarity index 100%
rename from utils/indigo-service/service/v2/bingo_ql/test.py
rename to utils/indigo-service/backend/service/v2/bingo_ql/test.py
diff --git a/utils/indigo-service/service/v2/celery_app.py b/utils/indigo-service/backend/service/v2/celery_app.py
similarity index 100%
rename from utils/indigo-service/service/v2/celery_app.py
rename to utils/indigo-service/backend/service/v2/celery_app.py
diff --git a/utils/indigo-service/service/v2/common/__init__.py b/utils/indigo-service/backend/service/v2/common/__init__.py
similarity index 100%
rename from utils/indigo-service/service/v2/common/__init__.py
rename to utils/indigo-service/backend/service/v2/common/__init__.py
diff --git a/utils/indigo-service/service/v2/common/util.py b/utils/indigo-service/backend/service/v2/common/util.py
similarity index 100%
rename from utils/indigo-service/service/v2/common/util.py
rename to utils/indigo-service/backend/service/v2/common/util.py
diff --git a/utils/indigo-service/service/v2/common_api.py b/utils/indigo-service/backend/service/v2/common_api.py
similarity index 75%
rename from utils/indigo-service/service/v2/common_api.py
rename to utils/indigo-service/backend/service/v2/common_api.py
index 124e86a503..4185452ca3 100644
--- a/utils/indigo-service/service/v2/common_api.py
+++ b/utils/indigo-service/backend/service/v2/common_api.py
@@ -3,7 +3,7 @@
from flask import Blueprint, jsonify
# import re
-# from v2.db.database import db_session
+from v2.db.database import db_session
from v2.indigo_api import indigo_init
common_api = Blueprint("common_api", __name__)
@@ -23,8 +23,10 @@ def version():
"""
versions = {}
- # if is_indigo_db():
- # versions['bingo_version'] = db_session.execute("SELECT Bingo.GetVersion();").fetchone()[0]
+ versions["bingo_version"] = db_session.execute(
+ "SELECT Bingo.GetVersion();"
+ ).fetchone()[0]
+
indigo = indigo_init()
versions["indigo_version"] = indigo.version()
@@ -38,14 +40,3 @@ def version():
# versions['imago_versions'] = imago_versions
return jsonify(versions), 200, {"Content-Type": "application/json"}
-
-
-#
-#
-# def is_indigo_db():
-# try:
-# import socket
-# socket.gethostbyname('indigo_db')
-# return True
-# except:
-# return False
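One caveat for the re-enabled bingo_version query: requirements.txt leaves SQLAlchemy unpinned, and from SQLAlchemy 2.0 Session.execute no longer accepts a bare string. A version-proof sketch of the same call, with db_session being the scoped session from v2/db/database.py:

    # Sketch only: text() is accepted by both SQLAlchemy 1.x and 2.x,
    # whereas the bare-string form raises an error on 2.0.
    from sqlalchemy import text

    bingo_version = db_session.execute(
        text("SELECT Bingo.GetVersion();")
    ).fetchone()[0]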
diff --git a/utils/indigo-service/service/v2/db/BingoPostgresAdapter.py b/utils/indigo-service/backend/service/v2/db/BingoPostgresAdapter.py
similarity index 93%
rename from utils/indigo-service/service/v2/db/BingoPostgresAdapter.py
rename to utils/indigo-service/backend/service/v2/db/BingoPostgresAdapter.py
index 3818acaa1d..e1dcfdb16e 100644
--- a/utils/indigo-service/service/v2/db/BingoPostgresAdapter.py
+++ b/utils/indigo-service/backend/service/v2/db/BingoPostgresAdapter.py
@@ -3,6 +3,7 @@
import logging
import psycopg2
+import psycopg2.extras
from ..bingo_ql.query import QueryBuilder
from ..common.util import merge_dicts
@@ -10,6 +11,8 @@
from .models import LibraryMeta
bingo_logger = logging.getLogger("bingo")
+
+
# bingo_logger.addHandler(logging.FileHandler('/srv/api/app.log'))
@@ -28,7 +31,7 @@ def connection(self):
return self._connection
def _get_structure_sql(self, structure, params):
- stype = params["search_type"]
+ stype = params["type"]
if stype != "sim":
if stype.lower() == "molformula":
stype = "gross"
@@ -90,7 +93,7 @@ def library_create(self, library_name, user_data):
db_session.commit()
cursor = self.connection.cursor()
cursor.execute(
- "create table {0}(s serial, m bytea, p jsonb)".format(
+ "create table {0}(s serial, m text not null, p jsonb not null)".format(
self.get_table_name_for_id(metalib.library_id)
)
)
@@ -282,21 +285,29 @@ def do_search(self, params):
finally:
self.connection.commit()
+ def insert_sdf(self, library_id, data):
+ try:
+ cursor = self.connection.cursor()
+ insert_query = "insert into {0}(m, p) values %s".format(
+ self.get_table_name_for_id(library_id)
+ )
+ psycopg2.extras.execute_values(
+ cursor, insert_query, data, template=None, page_size=1000
+ )
+ finally:
+ self.connection.commit()
+
def create_indices(self, table_name):
try:
index_name = self.get_index_name(table_name)
cursor = self.connection.cursor()
- cursor.execute("drop index if exists {0}".format(index_name))
- cursor.execute(
- "drop index if exists {0}".format("id_" + index_name)
- )
cursor.execute(
- "create index {0} on {1} using bingo_idx (m bingo.bmolecule) with (IGNORE_STEREOCENTER_ERRORS=1,IGNORE_CISTRANS_ERRORS=1,FP_TAU_SIZE=0)".format(
+ "create index if not exists {0} on {1} using bingo_idx (m bingo.molecule) with (IGNORE_STEREOCENTER_ERRORS=1,IGNORE_CISTRANS_ERRORS=1,FP_TAU_SIZE=0)".format(
index_name, table_name
)
)
cursor.execute(
- "create index {0} on {1} (s)".format(
+ "create index if not exists {0} on {1} (s)".format(
"id_" + index_name, table_name
)
)
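The new insert_sdf method batches rows through psycopg2.extras.execute_values, which expands the single "values %s" placeholder into multi-row INSERT statements of up to page_size rows each. A standalone sketch of the same call; the connection parameters and table name are illustrative assumptions:

    # Sketch only: placeholder credentials and a hypothetical demo table.
    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect(host="db", user="postgres", password="example")
    cursor = conn.cursor()
    rows = [("C1=CC=CC=C1", psycopg2.extras.Json([{"a": "name", "b": "benzene"}]))]
    psycopg2.extras.execute_values(
        cursor,
        "insert into indigoservice.structures_demo(m, p) values %s",
        rows,
        page_size=1000,  # rows per generated statement, as in insert_sdf
    )
    conn.commit()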
diff --git a/utils/indigo-service/service/v2/db/__init__.py b/utils/indigo-service/backend/service/v2/db/__init__.py
similarity index 100%
rename from utils/indigo-service/service/v2/db/__init__.py
rename to utils/indigo-service/backend/service/v2/db/__init__.py
diff --git a/utils/indigo-service/service/v2/db/database.py b/utils/indigo-service/backend/service/v2/db/database.py
similarity index 100%
rename from utils/indigo-service/service/v2/db/database.py
rename to utils/indigo-service/backend/service/v2/db/database.py
diff --git a/utils/indigo-service/service/v2/db/models.py b/utils/indigo-service/backend/service/v2/db/models.py
similarity index 94%
rename from utils/indigo-service/service/v2/db/models.py
rename to utils/indigo-service/backend/service/v2/db/models.py
index 951b400ce6..a76ae48635 100644
--- a/utils/indigo-service/service/v2/db/models.py
+++ b/utils/indigo-service/backend/service/v2/db/models.py
@@ -10,6 +10,7 @@
class LibraryMeta(Base):
__tablename__ = "library_metadata"
+ __table_args__ = {"schema": "indigoservice"}
library_id = Column(String(36), primary_key=True)
user_data = Column(JSONB)
service_data = Column(JSONB)
@@ -29,6 +30,7 @@ def __init__(self, name=None, user_data=None):
class User(Base):
__tablename__ = "users"
+ __table_args__ = {"schema": "indigoservice"}
user_id = Column(Integer, primary_key=True)
username = Column(String(50))
email = Column(String(100), unique=True)
diff --git a/utils/indigo-service/service/v2/imago_api.py b/utils/indigo-service/backend/service/v2/imago_api.py
similarity index 100%
rename from utils/indigo-service/service/v2/imago_api.py
rename to utils/indigo-service/backend/service/v2/imago_api.py
diff --git a/utils/indigo-service/service/v2/indigo_api.py b/utils/indigo-service/backend/service/v2/indigo_api.py
similarity index 98%
rename from utils/indigo-service/service/v2/indigo_api.py
rename to utils/indigo-service/backend/service/v2/indigo_api.py
index d3cd661b20..e831c05ed7 100644
--- a/utils/indigo-service/service/v2/indigo_api.py
+++ b/utils/indigo-service/backend/service/v2/indigo_api.py
@@ -317,21 +317,28 @@ def load_moldata(
md.is_query = False
else:
try:
- md.struct = indigo.loadMolecule(molstr)
- md.is_query = False
+ if not query:
+ md.struct = indigo.loadMolecule(molstr)
+ md.is_query = False
+ else:
+ md.struct = indigo.loadQueryMolecule(molstr)
+ md.is_query = True
except IndigoException:
try:
md.struct = indigo.loadQueryMolecule(molstr)
md.is_query = True
except IndigoException:
+ md.is_rxn = True
try:
- md.struct = indigo.loadReaction(molstr)
- md.is_rxn = True
- md.is_query = False
+ if not query:
+ md.struct = indigo.loadReaction(molstr)
+ md.is_query = False
+ else:
+ md.struct = indigo.loadQueryReaction(molstr)
+ md.is_query = True
except IndigoException:
try:
md.struct = indigo.loadQueryReaction(molstr)
- md.is_rxn = True
md.is_query = True
except IndigoException:
raise HttpException(
@@ -474,7 +481,7 @@ def wrapper(*args, **kwargs):
)
except IndigoException as e:
indigo_api_logger.error("IndigoException: {0}".format(e.value))
- indigo_api_logger.debug(traceback.format_exc())
+ indigo_api_logger.error(traceback.format_exc())
if json_output:
return (
jsonify({"error": "IndigoException: {0}".format(e.value)}),
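The reworked load_moldata now honors the query flag at each stage of the fallback ladder: molecule, query molecule, reaction, query reaction. A simplified sketch of that idea against the epam.indigo API (not the exact control flow above, which also tracks is_query and is_rxn on the returned object):

    # Sketch only: condenses the loader ladder into a loop.
    from indigo import Indigo, IndigoException

    def load_struct(molstr, query=False):
        indigo = Indigo()
        loaders = (
            [indigo.loadQueryMolecule, indigo.loadQueryReaction]
            if query
            else [
                indigo.loadMolecule,
                indigo.loadQueryMolecule,
                indigo.loadReaction,
                indigo.loadQueryReaction,
            ]
        )
        for loader in loaders:
            try:
                return loader(molstr)
            except IndigoException:
                continue
        raise ValueError("input is neither a molecule nor a reaction")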
diff --git a/utils/indigo-service/service/v2/libraries_api.py b/utils/indigo-service/backend/service/v2/libraries_api.py
similarity index 93%
rename from utils/indigo-service/service/v2/libraries_api.py
rename to utils/indigo-service/backend/service/v2/libraries_api.py
index 6d24f720b6..dbb78ab833 100644
--- a/utils/indigo-service/service/v2/libraries_api.py
+++ b/utils/indigo-service/backend/service/v2/libraries_api.py
@@ -3,13 +3,12 @@
import logging
import os
import re
-import subprocess
import traceback
import types
-import zlib
from time import time
import flask_restful
+import indigo
import redis
from flask import Blueprint, Response, request
from flask_httpauth import HTTPBasicAuth
@@ -17,6 +16,7 @@
from indigo.inchi import IndigoInchi
from indigo.renderer import IndigoRenderer
from marshmallow.exceptions import ValidationError
+from psycopg2.extras import Json
from pyparsing import ParseException
import config
@@ -63,7 +63,7 @@ def _prepare_row(row):
result_props[p["a"]] = p["b"]
record = {
"id": row[0],
- "structure": zlib.decompress(row[1], 16 + zlib.MAX_WBITS),
+ "structure": row[1],
"properties": result_props,
"library_id": row[3],
}
@@ -107,7 +107,13 @@ def search_total(self, params):
class Searcher(flask_restful.Resource):
def post(self):
"""
- Search structures
+ Search in library
+ ---
+ tags:
+ - bingo
+ responses:
+ 200:
+ description: search results
"""
libraries_api_logger.info(
"[REQUEST] POST /search {0}".format(request.data)
@@ -119,7 +125,9 @@ def post(self):
"error": "Invalid input JSON: {0}".format(request.data)
}, 400
try:
- search_params = SearcherSchema(strict=True).load(input_dict).data
+ search_params = SearcherSchema().load(input_dict)
for library_id in search_params["library_ids"]:
if not LibraryMeta.query.filter(
LibraryMeta.library_id == library_id
@@ -130,7 +138,7 @@ def post(self):
results = []
for row in cursor:
item = _prepare_row(row)
- item["structure"] = item["structure"].decode()
+ item["structure"] = item["structure"]
results.append(item)
libraries_api_logger.info(
"[RESPONSE] POST /search found {0} items".format(len(results))
@@ -239,7 +247,7 @@ def post(self):
}, 400
try:
- data = LibrarySchema(strict=True).load(input_dict).data
+ data = LibrarySchema().load(input_dict)
library_id = libraries_api.adapter.library_create(
data["name"], data["user_data"]
)
@@ -387,31 +395,32 @@ def save_file(library_id, stream, mime_type):
@staticmethod
def external_insert(library_id, path):
- args = [
- os.path.join("/srv", "uploader", "indigo_uploader"),
- "-c",
- os.path.join("/srv", "uploader", "uploader_config.yml"),
- path,
- libraries_api.adapter.get_table_name_for_id(library_id),
- ]
-
- proc = subprocess.Popen(
- args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
- )
- out, err = proc.communicate()
- out = out.decode("utf-8")
- time_re = re.search(r"Insert total time = (\d+) ms", out)
- total_time = round(int(time_re.group(1)) / 1000.0, 3)
- speed_re = re.search(
- r"Average insert time = (\d+) structures per second", out
- )
- average_speed = int(speed_re.group(1))
- struct_re = re.search(r"Total structures processed = (\d+)", out)
- struct_count = int(struct_re.group(1))
+ struct_count = 0
+ start = time()
+ data = []
+ molecule: indigo.IndigoObject
+ for molecule in libraries_api.indigo.iterateSDFile(path):
+ props = []
+ for prop in molecule.iterateProperties():
+ prop_name = prop.name()
+ prop_value = molecule.getProperty(prop_name)
+ prop = {}
+ prop["x"] = prop_name.lower()
+ try:
+ prop["y"] = json.loads(prop_value)
+ except Exception:
+ prop["y"] = prop_value
+ prop["a"] = prop_name
+ prop["b"] = prop_value
+ props.append(prop)
+ data.append((molecule.rawData(), Json(props)))
+ struct_count += 1
+ libraries_api.adapter.insert_sdf(library_id, data)
+ total_time = time() - start
return {
"insert_time": total_time,
"structures_count": struct_count,
- "insert_speed": average_speed,
+ "insert_speed": struct_count / total_time,
}
@staticmethod
@@ -543,7 +552,7 @@ def post(self):
"error": "Invalid input JSON: {0}".format(request.data)
}, 400
try:
- input_dict = UserSchema(strict=True).load(input_dict).data
+ input_dict = UserSchema().load(input_dict)
if Usermodel.query.filter(
Usermodel.email == input_dict["email"]
).first():
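external_insert now parses the SDF in-process with Indigo instead of shelling out to the indigo_uploader binary: every record contributes its raw molfile plus a JSON property list, and the whole batch goes to insert_sdf in one call. A self-contained sketch of that extraction loop (the file path is illustrative):

    # Sketch only: mirrors the property-extraction loop in external_insert.
    import json

    from indigo import Indigo
    from psycopg2.extras import Json

    indigo = Indigo()
    data = []
    for molecule in indigo.iterateSDFile("library.sdf"):
        props = []
        for prop in molecule.iterateProperties():
            name = prop.name()
            value = molecule.getProperty(name)
            try:
                parsed = json.loads(value)  # keep numeric/JSON values typed
            except (ValueError, TypeError):
                parsed = value
            props.append({"x": name.lower(), "y": parsed, "a": name, "b": value})
        data.append((molecule.rawData(), Json(props)))
    # data is then handed to BingoPostgresAdapter.insert_sdf(library_id, data)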
diff --git a/utils/indigo-service/service/v2/validation.py b/utils/indigo-service/backend/service/v2/validation.py
similarity index 92%
rename from utils/indigo-service/service/v2/validation.py
rename to utils/indigo-service/backend/service/v2/validation.py
index 664224dce8..7bb6ed2fa4 100644
--- a/utils/indigo-service/service/v2/validation.py
+++ b/utils/indigo-service/backend/service/v2/validation.py
@@ -163,7 +163,7 @@ class IndigoAutomapSchema(IndigoRequestSchema):
class SearcherSchema(Schema):
- search_type = fields.Str(
+ type = fields.Str(
load_from="type",
required=True,
validate=OneOf(["sub", "exact", "sim", "molFormula"]),
@@ -179,8 +179,9 @@ class SearcherSchema(Schema):
options = fields.Str(missing="")
@post_load
- def strip_text_query(self, data):
+ def strip_text_query(self, data, **kwargs):
data["query_text"] = data["query_text"].strip()
+ return data
@validates_schema
def query_exists(self, data, **kwargs):
@@ -188,23 +189,23 @@ def query_exists(self, data, **kwargs):
raise ValidationError("Empty queries.")
@validates_schema
- def search_type_exists(self, data, **kwargs):
- if "search_type" not in data:
+ def type_exists(self, data, **kwargs):
+ if "type" not in data:
raise ValidationError(
"No search type selected, must be one of: 'sub', 'exact, sim', 'molFormula'"
)
- if data["search_type"] not in ("sub", "exact", "sim", "molFormula"):
+ if data["type"] not in ("sub", "exact", "sim", "molFormula"):
raise ValidationError(
"Wrong search type {0}, must be one of 'sub', 'exact, sim', 'molFormula'".format(
- data["search_type"]
+ data["type"]
)
)
@validates_schema
def sim_min_max(self, data, **kwargs):
if (
- data.get("search_type")
- and "sim" in data.get("search_type")
+ data.get("type")
+ and "sim" in data.get("type")
and data.get("min_sim") > data.get("max_sim")
):
raise ValidationError("Similarity min can not be greater than max")
@@ -212,8 +213,8 @@ def sim_min_max(self, data, **kwargs):
@validates_schema
def sim_min_range(self, data, **kwargs):
if (
- data.get("search_type")
- and "sim" in data.get("search_type")
+ data.get("type")
+ and "sim" in data.get("type")
and (data.get("min_sim") < 0 or data.get("min_sim") >= 1)
):
raise ValidationError(
@@ -224,13 +225,13 @@ def sim_min_range(self, data, **kwargs):
def sim_max_range(self, data, **kwargs):
print(
data,
- data.get("search_type"),
+ data.get("type"),
data.get("max_sim"),
data.get("min_sim"),
)
if (
- data.get("search_type")
- and "sim" in data.get("search_type")
+ data.get("type")
+ and "sim" in data.get("type")
and (data.get("max_sim") <= 0 or data.get("max_sim") > 1)
):
raise ValidationError(
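The strip_text_query change reflects marshmallow 3 semantics: pre- and post-processing hooks receive extra keyword arguments and must return the (possibly modified) data, otherwise load() silently yields None. A minimal sketch:

    # Sketch only: demonstrates why the hook now takes **kwargs and returns data.
    from marshmallow import Schema, fields, post_load

    class DemoSchema(Schema):
        query_text = fields.Str(missing="")

        @post_load
        def strip_text_query(self, data, **kwargs):
            data["query_text"] = data["query_text"].strip()
            return data  # omitting this return would make load() yield None

    print(DemoSchema().load({"query_text": "  C1=CC=CC=C1  "}))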
diff --git a/utils/indigo-service/db/Dockerfile b/utils/indigo-service/db/Dockerfile
index 9d406a5ec8..81412ca438 100644
--- a/utils/indigo-service/db/Dockerfile
+++ b/utils/indigo-service/db/Dockerfile
@@ -1,19 +1,4 @@
-FROM library/postgres:9.6
-
-ENV POSTGRES_PASSWORD postgres
-
-# Install
-RUN apt-get update -qq && apt-get upgrade -qq -y
-RUN apt-get install -y --no-install-recommends p7zip-full
-
-# Copy cartridge
-COPY ./lib/bingo-postgres*.7z /opt/
-RUN cd /opt && \
- 7z x bingo-postgres*.7z && \
- mv /opt/bingo-postgres*/ /opt/bingo-postgres/ && \
- chmod 777 -R /opt/bingo-postgres/
+FROM epmlsop/bingo-postgres:12-latest
# Add init scripts
-COPY ./db/configure_postgres.sh /docker-entrypoint-initdb.d/
-COPY ./db/init_database.sql /opt/
-COPY ./db/postgresql.conf /opt/
+COPY ./init.sql /docker-entrypoint-initdb.d/
diff --git a/utils/indigo-service/db/configure_postgres.sh b/utils/indigo-service/db/configure_postgres.sh
deleted file mode 100644
index 84145e7442..0000000000
--- a/utils/indigo-service/db/configure_postgres.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-cd /opt/bingo-postgres/
-source bingo-pg-install.sh -y
-psql -U postgres -c "create database indigoservice"
-psql -U postgres -d indigoservice -f ./bingo_install.sql
-psql -U postgres -d indigoservice -f /opt/init_database.sql
-cp /opt/postgresql.conf /var/lib/postgresql/data
diff --git a/utils/indigo-service/db/init.sql b/utils/indigo-service/db/init.sql
new file mode 100644
index 0000000000..649e05144c
--- /dev/null
+++ b/utils/indigo-service/db/init.sql
@@ -0,0 +1,24 @@
+create schema indigoservice;
+
+create table indigoservice.library_metadata
+(
+ library_id varchar(36) primary key,
+ service_data jsonb,
+ user_data jsonb,
+ index_data jsonb
+);
+
+create table indigoservice.users
+(
+ user_id serial primary key,
+ username varchar(50) not null,
+ password varchar(100) not null,
+ email varchar(100) not null,
+ foreign_auth_provider varchar(10),
+ foreign_auth_id integer,
+ user_created timestamp default now()
+);
+
+-- upload testing schema
+drop schema if exists test_upload cascade;
+create schema test_upload;
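Scripts dropped into /docker-entrypoint-initdb.d, such as this init.sql, are executed by the stock postgres entrypoint only on the first initialization of the data directory (the epmlsop/bingo-postgres base image presumably inherits this behavior). A quick verification sketch, with credentials mirroring the backend config (host "db", user "postgres", password from POSTGRES_PASSWORD):

    # Sketch only: checks that init.sql created the indigoservice schema.
    import os

    import psycopg2

    conn = psycopg2.connect(
        host="db",
        user="postgres",
        password=os.environ["POSTGRES_PASSWORD"],
        dbname="postgres",
    )
    with conn.cursor() as cur:
        cur.execute(
            "select schema_name from information_schema.schemata"
            " where schema_name = 'indigoservice'"
        )
        print(cur.fetchone())  # ('indigoservice',) once init.sql has run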
diff --git a/utils/indigo-service/db/init_database.sql b/utils/indigo-service/db/init_database.sql
deleted file mode 100644
index ebe8a9729e..0000000000
--- a/utils/indigo-service/db/init_database.sql
+++ /dev/null
@@ -1,30 +0,0 @@
--- indigoservice user
-create user indigoservice with password 'p@ssw0rd';
-create schema indigoservice authorization indigoservice;
-grant all on table pg_depend to indigoservice;
-grant usage on schema bingo to indigoservice;
-grant select on table bingo.bingo_config to indigoservice;
-grant select on table bingo.bingo_tau_config to indigoservice;
-
--- data schema
-create table indigoservice.library_metadata(
- library_id varchar(36) primary key,
- service_data jsonb,
- user_data jsonb,
- index_data jsonb);
-grant all on table indigoservice.library_metadata to indigoservice;
-
-create table indigoservice.users(
- user_id serial primary key,
- username varchar(50) not null,
- password varchar(100) not null,
- email varchar(100) not null,
- foreign_auth_provider varchar(10),
- foreign_auth_id integer,
- user_created timestamp default now());
-grant all on table indigoservice.users to indigoservice;
-grant all on sequence indigoservice.users_user_id_seq to indigoservice;
-
--- upload testing schema
-drop schema if exists test_upload cascade;
-create schema test_upload authorization indigoservice;
diff --git a/utils/indigo-service/db/postgresql.conf b/utils/indigo-service/db/postgresql.conf
deleted file mode 100644
index 20af482e70..0000000000
--- a/utils/indigo-service/db/postgresql.conf
+++ /dev/null
@@ -1,643 +0,0 @@
-# -----------------------------
-# PostgreSQL configuration file
-# -----------------------------
-#
-# This file consists of lines of the form:
-#
-# name = value
-#
-# (The "=" is optional.) Whitespace may be used. Comments are introduced with
-# "#" anywhere on a line. The complete list of parameter names and allowed
-# values can be found in the PostgreSQL documentation.
-#
-# The commented-out settings shown in this file represent the default values.
-# Re-commenting a setting is NOT sufficient to revert it to the default value;
-# you need to reload the server.
-#
-# This file is read on server startup and when the server receives a SIGHUP
-# signal. If you edit the file on a running system, you have to SIGHUP the
-# server for the changes to take effect, or use "pg_ctl reload". Some
-# parameters, which are marked below, require a server shutdown and restart to
-# take effect.
-#
-# Any parameter can also be given as a command-line option to the server, e.g.,
-# "postgres -c log_connections=on". Some parameters can be changed at run time
-# with the "SET" SQL command.
-#
-# Memory units: kB = kilobytes Time units: ms = milliseconds
-# MB = megabytes s = seconds
-# GB = gigabytes min = minutes
-# TB = terabytes h = hours
-# d = days
-
-
-#------------------------------------------------------------------------------
-# FILE LOCATIONS
-#------------------------------------------------------------------------------
-
-# The default values of these variables are driven from the -D command-line
-# option or PGDATA environment variable, represented here as ConfigDir.
-
-data_directory = '/var/lib/postgresql/data' # use data in another directory
- # (change requires restart)
-hba_file = '/var/lib/postgresql/data/pg_hba.conf' # host-based authentication file
- # (change requires restart)
-ident_file = '/var/lib/postgresql/data/pg_ident.conf' # ident configuration file
- # (change requires restart)
-
-# If external_pid_file is not explicitly set, no extra PID file is written.
-#external_pid_file = '' # write an extra PID file
- # (change requires restart)
-
-
-#------------------------------------------------------------------------------
-# CONNECTIONS AND AUTHENTICATION
-#------------------------------------------------------------------------------
-
-# - Connection Settings -
-
-listen_addresses = '*' # what IP address(es) to listen on;
- # comma-separated list of addresses;
- # defaults to 'localhost'; use '*' for all
- # (change requires restart)
-port = 5432 # (change requires restart)
-max_connections = 100 # (change requires restart)
-#superuser_reserved_connections = 3 # (change requires restart)
-#unix_socket_directories = '/tmp' # comma-separated list of directories
- # (change requires restart)
-#unix_socket_group = '' # (change requires restart)
-#unix_socket_permissions = 0777 # begin with 0 to use octal notation
- # (change requires restart)
-#bonjour = off # advertise server via Bonjour
- # (change requires restart)
-#bonjour_name = '' # defaults to the computer name
- # (change requires restart)
-
-# - Security and Authentication -
-
-#authentication_timeout = 1min # 1s-600s
-#ssl = off # (change requires restart)
-#ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers
- # (change requires restart)
-#ssl_prefer_server_ciphers = on # (change requires restart)
-#ssl_ecdh_curve = 'prime256v1' # (change requires restart)
-#ssl_cert_file = 'server.crt' # (change requires restart)
-#ssl_key_file = 'server.key' # (change requires restart)
-#ssl_ca_file = '' # (change requires restart)
-#ssl_crl_file = '' # (change requires restart)
-#password_encryption = on
-#db_user_namespace = off
-#row_security = on
-
-# GSSAPI using Kerberos
-#krb_server_keyfile = ''
-#krb_caseins_users = off
-
-# - TCP Keepalives -
-# see "man 7 tcp" for details
-
-#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds;
- # 0 selects the system default
-#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds;
- # 0 selects the system default
-#tcp_keepalives_count = 0 # TCP_KEEPCNT;
- # 0 selects the system default
-
-
-#------------------------------------------------------------------------------
-# RESOURCE USAGE (except WAL)
-#------------------------------------------------------------------------------
-
-# - Memory -
-
-shared_buffers = 512MB # min 128kB
- # (change requires restart)
-#huge_pages = try # on, off, or try
- # (change requires restart)
-#temp_buffers = 8MB # min 800kB
-#max_prepared_transactions = 0 # zero disables the feature
- # (change requires restart)
-# Caution: it is not advisable to set max_prepared_transactions nonzero unless
-# you actively intend to use prepared transactions.
-work_mem = 16MB # min 64kB
-maintenance_work_mem = 512MB # min 1MB
-#replacement_sort_tuples = 150000 # limits use of replacement selection sort
-#autovacuum_work_mem = -1 # min 1MB, or -1 to use maintenance_work_mem
-#max_stack_depth = 2MB # min 100kB
-dynamic_shared_memory_type = posix # the default is the first option
- # supported by the operating system:
- # posix
- # sysv
- # windows
- # mmap
- # use none to disable dynamic shared memory
- # (change requires restart)
-
-# - Disk -
-
-#temp_file_limit = -1 # limits per-process temp file space
- # in kB, or -1 for no limit
-
-# - Kernel Resource Usage -
-
-#max_files_per_process = 1000 # min 25
- # (change requires restart)
-#shared_preload_libraries = '' # (change requires restart)
-
-# - Cost-Based Vacuum Delay -
-
-#vacuum_cost_delay = 0 # 0-100 milliseconds
-#vacuum_cost_page_hit = 1 # 0-10000 credits
-#vacuum_cost_page_miss = 10 # 0-10000 credits
-#vacuum_cost_page_dirty = 20 # 0-10000 credits
-#vacuum_cost_limit = 200 # 1-10000 credits
-
-# - Background Writer -
-
-#bgwriter_delay = 200ms # 10-10000ms between rounds
-#bgwriter_lru_maxpages = 100 # 0-1000 max buffers written/round
-#bgwriter_lru_multiplier = 2.0 # 0-10.0 multiplier on buffers scanned/round
-#bgwriter_flush_after = 512kB # measured in pages, 0 disables
-
-# - Asynchronous Behavior -
-
-#effective_io_concurrency = 1 # 1-1000; 0 disables prefetching
-#max_worker_processes = 8 # (change requires restart)
-#max_parallel_workers_per_gather = 0 # taken from max_worker_processes
-#old_snapshot_threshold = -1 # 1min-60d; -1 disables; 0 is immediate
- # (change requires restart)
-#backend_flush_after = 0 # measured in pages, 0 disables
-
-
-#------------------------------------------------------------------------------
-# WRITE AHEAD LOG
-#------------------------------------------------------------------------------
-
-# - Settings -
-
-#wal_level = minimal # minimal, replica, or logical
- # (change requires restart)
-#fsync = on # flush data to disk for crash safety
- # (turning this off can cause
- # unrecoverable data corruption)
-#synchronous_commit = on # synchronization level;
- # off, local, remote_write, remote_apply, or on
-#wal_sync_method = fsync # the default is the first option
- # supported by the operating system:
- # open_datasync
- # fdatasync (default on Linux)
- # fsync
- # fsync_writethrough
- # open_sync
-#full_page_writes = on # recover from partial page writes
-#wal_compression = off # enable compression of full-page writes
-#wal_log_hints = off # also do full page writes of non-critical updates
- # (change requires restart)
-wal_buffers = 16MB # min 32kB, -1 sets based on shared_buffers
- # (change requires restart)
-#wal_writer_delay = 200ms # 1-10000 milliseconds
-#wal_writer_flush_after = 1MB # measured in pages, 0 disables
-
-#commit_delay = 0 # range 0-100000, in microseconds
-#commit_siblings = 5 # range 1-1000
-
-# - Checkpoints -
-
-#checkpoint_timeout = 5min # range 30s-1d
-#max_wal_size = 1GB
-#min_wal_size = 80MB
-checkpoint_completion_target = 0.7 # checkpoint target duration, 0.0 - 1.0
-#checkpoint_flush_after = 256kB # measured in pages, 0 disables
-#checkpoint_warning = 30s # 0 disables
-
-# - Archiving -
-
-#archive_mode = off # enables archiving; off, on, or always
- # (change requires restart)
-#archive_command = '' # command to use to archive a logfile segment
- # placeholders: %p = path of file to archive
- # %f = file name only
- # e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f'
-#archive_timeout = 0 # force a logfile segment switch after this
- # number of seconds; 0 disables
-
-
-#------------------------------------------------------------------------------
-# REPLICATION
-#------------------------------------------------------------------------------
-
-# - Sending Server(s) -
-
-# Set these on the master and on any standby that will send replication data.
-
-#max_wal_senders = 0 # max number of walsender processes
- # (change requires restart)
-#wal_keep_segments = 0 # in logfile segments, 16MB each; 0 disables
-#wal_sender_timeout = 60s # in milliseconds; 0 disables
-
-#max_replication_slots = 0 # max number of replication slots
- # (change requires restart)
-#track_commit_timestamp = off # collect timestamp of transaction commit
- # (change requires restart)
-
-# - Master Server -
-
-# These settings are ignored on a standby server.
-
-#synchronous_standby_names = '' # standby servers that provide sync rep
- # number of sync standbys and comma-separated list of application_name
- # from standby(s); '*' = all
-#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed
-
-# - Standby Servers -
-
-# These settings are ignored on a master server.
-
-#hot_standby = off # "on" allows queries during recovery
- # (change requires restart)
-#max_standby_archive_delay = 30s # max delay before canceling queries
- # when reading WAL from archive;
- # -1 allows indefinite delay
-#max_standby_streaming_delay = 30s # max delay before canceling queries
- # when reading streaming WAL;
- # -1 allows indefinite delay
-#wal_receiver_status_interval = 10s # send replies at least this often
- # 0 disables
-#hot_standby_feedback = off # send info from standby to prevent
- # query conflicts
-#wal_receiver_timeout = 60s # time that receiver waits for
- # communication from master
- # in milliseconds; 0 disables
-#wal_retrieve_retry_interval = 5s # time to wait before retrying to
- # retrieve WAL after a failed attempt
-
-
-#------------------------------------------------------------------------------
-# QUERY TUNING
-#------------------------------------------------------------------------------
-
-# - Planner Method Configuration -
-
-#enable_bitmapscan = on
-#enable_hashagg = on
-#enable_hashjoin = on
-#enable_indexscan = on
-#enable_indexonlyscan = on
-#enable_material = on
-#enable_mergejoin = on
-#enable_nestloop = on
-#enable_seqscan = on
-#enable_sort = on
-#enable_tidscan = on
-
-# - Planner Cost Constants -
-
-#seq_page_cost = 1.0 # measured on an arbitrary scale
-#random_page_cost = 4.0 # same scale as above
-#cpu_tuple_cost = 0.01 # same scale as above
-#cpu_index_tuple_cost = 0.005 # same scale as above
-#cpu_operator_cost = 0.0025 # same scale as above
-#parallel_tuple_cost = 0.1 # same scale as above
-#parallel_setup_cost = 1000.0 # same scale as above
-#min_parallel_relation_size = 8MB
-#effective_cache_size = 4GB
-
-# - Genetic Query Optimizer -
-
-#geqo = on
-#geqo_threshold = 12
-#geqo_effort = 5 # range 1-10
-#geqo_pool_size = 0 # selects default based on effort
-#geqo_generations = 0 # selects default based on effort
-#geqo_selection_bias = 2.0 # range 1.5-2.0
-#geqo_seed = 0.0 # range 0.0-1.0
-
-# - Other Planner Options -
-
-#default_statistics_target = 100 # range 1-10000
-#constraint_exclusion = partition # on, off, or partition
-#cursor_tuple_fraction = 0.1 # range 0.0-1.0
-#from_collapse_limit = 8
-#join_collapse_limit = 8 # 1 disables collapsing of explicit
- # JOIN clauses
-#force_parallel_mode = off
-
-
-#------------------------------------------------------------------------------
-# ERROR REPORTING AND LOGGING
-#------------------------------------------------------------------------------
-
-# - Where to Log -
-
-#log_destination = 'stderr' # Valid values are combinations of
- # stderr, csvlog, syslog, and eventlog,
- # depending on platform. csvlog
- # requires logging_collector to be on.
-
-# This is used when logging to stderr:
-logging_collector = on # Enable capturing of stderr and csvlog
- # into log files. Required to be on for
- # csvlogs.
- # (change requires restart)
-
-# These are only used if logging_collector is on:
-log_directory = 'pg_log' # directory where log files are written,
- # can be absolute or relative to PGDATA
-log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log' # log file name pattern,
- # can include strftime() escapes
-log_file_mode = 0600 # creation mode for log files,
- # begin with 0 to use octal notation
-log_truncate_on_rotation = on # If on, an existing log file with the
- # same name as the new log file will be
- # truncated rather than appended to.
- # But such truncation only occurs on
- # time-driven rotation, not on restarts
- # or size-driven rotation. Default is
- # off, meaning append to existing files
- # in all cases.
-log_rotation_age = 1d # Automatic rotation of logfiles will
- # happen after that time. 0 disables.
-log_rotation_size = 10MB # Automatic rotation of logfiles will
- # happen after that much log output.
- # 0 disables.
-
-# These are relevant when logging to syslog:
-#syslog_facility = 'LOCAL0'
-#syslog_ident = 'postgres'
-#syslog_sequence_numbers = on
-#syslog_split_messages = on
-
-# This is only relevant when logging to eventlog (win32):
-# (change requires restart)
-#event_source = 'PostgreSQL'
-
-# - When to Log -
-
-#client_min_messages = notice # values in order of decreasing detail:
- # debug5
- # debug4
- # debug3
- # debug2
- # debug1
- # log
- # notice
- # warning
- # error
-
-log_min_messages = info # values in order of decreasing detail:
- # debug5
- # debug4
- # debug3
- # debug2
- # debug1
- # info
- # notice
- # warning
- # error
- # log
- # fatal
- # panic
-
-#log_min_error_statement = error # values in order of decreasing detail:
- # debug5
- # debug4
- # debug3
- # debug2
- # debug1
- # info
- # notice
- # warning
- # error
- # log
- # fatal
- # panic (effectively off)
-
-#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements
- # and their durations, > 0 logs only
- # statements running at least this number
- # of milliseconds
-
-
-# - What to Log -
-
-#debug_print_parse = off
-#debug_print_rewritten = off
-#debug_print_plan = off
-#debug_pretty_print = on
-#log_checkpoints = off
-#log_connections = off
-#log_disconnections = off
-#log_duration = off
-#log_error_verbosity = default # terse, default, or verbose messages
-#log_hostname = off
-log_line_prefix = '%t [%p-%l] %q%u@%d ' # special values:
- # %a = application name
- # %u = user name
- # %d = database name
- # %r = remote host and port
- # %h = remote host
- # %p = process ID
- # %t = timestamp without milliseconds
- # %m = timestamp with milliseconds
- # %n = timestamp with milliseconds (as a Unix epoch)
- # %i = command tag
- # %e = SQL state
- # %c = session ID
- # %l = session line number
- # %s = session start timestamp
- # %v = virtual transaction ID
- # %x = transaction ID (0 if none)
- # %q = stop here in non-session
- # processes
- # %% = '%'
- # e.g. '<%u%%%d> '
-#log_lock_waits = off # log lock waits >= deadlock_timeout
-#log_statement = 'none' # none, ddl, mod, all
-#log_replication_commands = off
-#log_temp_files = -1 # log temporary files equal or larger
- # than the specified size in kilobytes;
- # -1 disables, 0 logs all temp files
-log_timezone = 'UTC'
-
-
-# - Process Title -
-
-#cluster_name = '' # added to process titles if nonempty
- # (change requires restart)
-#update_process_title = on
-
-
-#------------------------------------------------------------------------------
-# RUNTIME STATISTICS
-#------------------------------------------------------------------------------
-
-# - Query/Index Statistics Collector -
-
-#track_activities = on
-#track_counts = on
-#track_io_timing = off
-#track_functions = none # none, pl, all
-#track_activity_query_size = 1024 # (change requires restart)
-#stats_temp_directory = 'pg_stat_tmp'
-
-
-# - Statistics Monitoring -
-
-#log_parser_stats = off
-#log_planner_stats = off
-#log_executor_stats = off
-#log_statement_stats = off
-
-
-#------------------------------------------------------------------------------
-# AUTOVACUUM PARAMETERS
-#------------------------------------------------------------------------------
-
-#autovacuum = on # Enable autovacuum subprocess? 'on'
- # requires track_counts to also be on.
-#log_autovacuum_min_duration = -1 # -1 disables, 0 logs all actions and
- # their durations, > 0 logs only
- # actions running at least this number
- # of milliseconds.
-#autovacuum_max_workers = 3 # max number of autovacuum subprocesses
- # (change requires restart)
-#autovacuum_naptime = 1min # time between autovacuum runs
-#autovacuum_vacuum_threshold = 50 # min number of row updates before
- # vacuum
-#autovacuum_analyze_threshold = 50 # min number of row updates before
- # analyze
-#autovacuum_vacuum_scale_factor = 0.2 # fraction of table size before vacuum
-#autovacuum_analyze_scale_factor = 0.1 # fraction of table size before analyze
-#autovacuum_freeze_max_age = 200000000 # maximum XID age before forced vacuum
- # (change requires restart)
-#autovacuum_multixact_freeze_max_age = 400000000 # maximum multixact age
- # before forced vacuum
- # (change requires restart)
-#autovacuum_vacuum_cost_delay = 20ms # default vacuum cost delay for
- # autovacuum, in milliseconds;
- # -1 means use vacuum_cost_delay
-#autovacuum_vacuum_cost_limit = -1 # default vacuum cost limit for
- # autovacuum, -1 means use
- # vacuum_cost_limit
-
-
-#------------------------------------------------------------------------------
-# CLIENT CONNECTION DEFAULTS
-#------------------------------------------------------------------------------
-
-# - Statement Behavior -
-
-#search_path = '"$user", public' # schema names
-#default_tablespace = '' # a tablespace name, '' uses the default
-#temp_tablespaces = '' # a list of tablespace names, '' uses
- # only default tablespace
-#check_function_bodies = on
-#default_transaction_isolation = 'read committed'
-#default_transaction_read_only = off
-#default_transaction_deferrable = off
-#session_replication_role = 'origin'
-statement_timeout = 1800000 # in milliseconds, 0 is disabled
-#lock_timeout = 0 # in milliseconds, 0 is disabled
-#idle_in_transaction_session_timeout = 0 # in milliseconds, 0 is disabled
-#vacuum_freeze_min_age = 50000000
-#vacuum_freeze_table_age = 150000000
-#vacuum_multixact_freeze_min_age = 5000000
-#vacuum_multixact_freeze_table_age = 150000000
-#bytea_output = 'hex' # hex, escape
-#xmlbinary = 'base64'
-#xmloption = 'content'
-#gin_fuzzy_search_limit = 0
-#gin_pending_list_limit = 4MB
-
-# - Locale and Formatting -
-
-datestyle = 'iso, mdy'
-#intervalstyle = 'postgres'
-timezone = 'UTC'
-#timezone_abbreviations = 'Default' # Select the set of available time zone
- # abbreviations. Currently, there are
- # Default
- # Australia (historical usage)
- # India
- # You can create your own file in
- # share/timezonesets/.
-#extra_float_digits = 0 # min -15, max 3
-#client_encoding = sql_ascii # actually, defaults to database
- # encoding
-
-# These settings are initialized by initdb, but they can be changed.
-lc_messages = 'en_US.UTF-8' # locale for system error message
- # strings
-lc_monetary = 'en_US.UTF-8' # locale for monetary formatting
-lc_numeric = 'en_US.UTF-8' # locale for number formatting
-lc_time = 'en_US.UTF-8' # locale for time formatting
-
-# default configuration for text search
-default_text_search_config = 'pg_catalog.english'
-
-# - Other Defaults -
-
-#dynamic_library_path = '$libdir'
-#local_preload_libraries = ''
-#session_preload_libraries = ''
-
-
-#------------------------------------------------------------------------------
-# LOCK MANAGEMENT
-#------------------------------------------------------------------------------
-
-#deadlock_timeout = 1s
-#max_locks_per_transaction = 64 # min 10
- # (change requires restart)
-#max_pred_locks_per_transaction = 64 # min 10
- # (change requires restart)
-
-
-#------------------------------------------------------------------------------
-# VERSION/PLATFORM COMPATIBILITY
-#------------------------------------------------------------------------------
-
-# - Previous PostgreSQL Versions -
-
-#array_nulls = on
-#backslash_quote = safe_encoding # on, off, or safe_encoding
-#default_with_oids = off
-#escape_string_warning = on
-#lo_compat_privileges = off
-#operator_precedence_warning = off
-#quote_all_identifiers = off
-#sql_inheritance = on
-#standard_conforming_strings = on
-#synchronize_seqscans = on
-
-# - Other Platforms and Clients -
-
-#transform_null_equals = off
-
-
-#------------------------------------------------------------------------------
-# ERROR HANDLING
-#------------------------------------------------------------------------------
-
-#exit_on_error = off # terminate session on any error?
-#restart_after_crash = on # reinitialize after backend crash?
-
-
-#------------------------------------------------------------------------------
-# CONFIG FILE INCLUDES
-#------------------------------------------------------------------------------
-
-# These options allow settings to be loaded from files other than the
-# default postgresql.conf.
-
-#include_dir = 'conf.d' # include files ending in '.conf' from
- # directory 'conf.d'
-#include_if_exists = 'exists.conf' # include file only if it exists
-#include = 'special.conf' # include file
-
-
-#------------------------------------------------------------------------------
-# CUSTOMIZED OPTIONS
-#------------------------------------------------------------------------------
-
-# Add settings for extensions here
diff --git a/utils/indigo-service/db/uploader_config.yml b/utils/indigo-service/db/uploader_config.yml
deleted file mode 100644
index f9fc45de44..0000000000
--- a/utils/indigo-service/db/uploader_config.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-database:
- url: indigo_db
- db: indigoservice
- user: indigoservice
- pass: p@ssw0rd
- schema: indigoservice
-general:
- buffer_size: 10000
- structure_column: m
- properties_column: p
\ No newline at end of file
diff --git a/utils/indigo-service/doc/db/schema.md b/utils/indigo-service/doc/db/schema.md
deleted file mode 100644
index 3683650df7..0000000000
--- a/utils/indigo-service/doc/db/schema.md
+++ /dev/null
@@ -1,54 +0,0 @@
-
-## The indigo-service database
-
-We use [PostgreSQL 9.4](http://www.postgresql.org/docs/9.4/static/release-9-4.html) with the [Bingo](http://lifescience.opensource.epam.com/bingo/bingo-postgres.html) cartridge installed.
-
-The service performs all of its operations in the PostgreSQL schema ```indigoservice``` under a user of the same name.
-
-### Tables
-
-A set of structures uploaded by a user in the [SD file](https://en.wikipedia.org/wiki/Chemical_table_file#SDF) format is what we call a *library*.
-
-#### ```library_metadata```
-Each record in this table corresponds to one library of the service; the library data itself is stored in a ```bingo_structures_XXX``` table (see below).
-
-- **library\_id** *varchar(36) primary key*
-
-a unique ID, computed from the name the user supplied when creating the library
-
-- **service_data** *jsonb*
-
-service data about the library: its name, creation time, structure count, etc.
-
-- **metadata** *jsonb*
-
-extra user-supplied data
-
-- **index_data** *jsonb*
-
-search data
-
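-A minimal DDL sketch, reconstructed from the column list above purely for illustration (the service's actual migration scripts are the source of truth):
-
-```
-create table library_metadata (
-    library_id   varchar(36) primary key, -- derived from the user-supplied name
-    service_data jsonb,                   -- name, timestamps, structure count, ...
-    metadata     jsonb,                   -- extra user-supplied data
-    index_data   jsonb                    -- search data
-);
-```
-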
-#### ```bingo_structures_XXX```
-Tables of this kind are created by the service (via the ```POST /libraries``` API method); one library corresponds to exactly one table. ```XXX``` is the **library_id** from the ```library_metadata``` table (normalized with the regular expression ```s/-/_/g```). One record corresponds to one structure of the uploaded file. Data is loaded into the table via the ```POST /libraries/{library_id}/uploads``` API method. The column names are deliberately short to optimize the performance of search queries.
-
-- **s** *serial*
-
-the internal ID of the structure
-
-- **m** *bytea*
-
-the structure in [Molfile](https://en.wikipedia.org/wiki/Chemical_table_file#Molfile) format
-
-- **p** *jsonb*
-
-the list of properties extracted from the [SD file](https://en.wikipedia.org/wiki/Chemical_table_file#SDF)
-
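-The corresponding per-library table, sketched under the same assumption:
-
-```
-create table bingo_structures_XXX (
-    s serial, -- internal structure ID
-    m bytea,  -- the structure as a Molfile
-    p jsonb   -- properties extracted from the SD file
-);
-```
-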
-### Indexes
-
-#### ```bingo_idx_YYY```
-
-For every ```bingo_structures_XXX``` table an index ```bingo_idx_YYY``` is created, where ```YYY``` is the result of hashing the table name. The full query for creating the index looks as follows:
-
-```
-"create index {0} on {1} using bingo_idx (m bingo.bmolecule) with (IGNORE_STEREOCENTER_ERRORS=1,IGNORE_CISTRANS_ERRORS=1,FP_TAU_SIZE=0)".format(index_name, table_name)
-```
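-
-For example, a library with ID ```9ca31f91-2f5a-4c01-adb4-4fc137ff8a4f``` would get the following substituted statement (```bingo_idx_YYY``` stands in for the hashed index name, which is only known at runtime):
-
-```
-create index bingo_idx_YYY on bingo_structures_9ca31f91_2f5a_4c01_adb4_4fc137ff8a4f
-    using bingo_idx (m bingo.bmolecule)
-    with (IGNORE_STEREOCENTER_ERRORS=1,IGNORE_CISTRANS_ERRORS=1,FP_TAU_SIZE=0)
-```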
diff --git a/utils/indigo-service/doc/rfcs/000-template.md b/utils/indigo-service/doc/rfcs/000-template.md
deleted file mode 100644
index 704e509d3e..0000000000
--- a/utils/indigo-service/doc/rfcs/000-template.md
+++ /dev/null
@@ -1,33 +0,0 @@
-- Start Date: (fill me in with today's date, YYYY-MM-DD)
-- Implementation MR: (leave this empty)
-- Source issue: (omit if none exists)
-- Status: (In progress, Implemented, Rejected, Deferred)
-- Scope: (db, api, infrastructure, etc.)
-
-# Summary
-
-One-paragraph explanation of the feature.
-
-# Motivation
-
-Why are we doing this? What use cases does it support?
-What is the expected outcome?
-
-# Detailed design
-
-This is the bulk of the RFC. Explain the design in enough detail for somebody
-familiar with the service internals to understand and implement.
-This should get into specifics and corner-cases, and include examples of how
-the feature is used.
-
-# Drawbacks
-
-Why should we *not* do this?
-
-# Alternatives
-
-What other designs have been considered? What is the impact of not doing this?
-
-# Unresolved questions
-
-What parts of the design are still TBD?
diff --git a/utils/indigo-service/doc/rfcs/text/005-index-data-column.md b/utils/indigo-service/doc/rfcs/text/005-index-data-column.md
deleted file mode 100644
index f8da3ffbc3..0000000000
--- a/utils/indigo-service/doc/rfcs/text/005-index-data-column.md
+++ /dev/null
@@ -1,73 +0,0 @@
-- Start Date: 2015-11-03
-- Implementation MR: !4
-- Source issue: #47
-- Status: Implemented
-- Scope: db, optimization
-
-# Summary
-
-Add an ```index_data``` column to the ```library_metadata``` table.
-
-# Motivation
-
-Right now the ```library_metadata.service_data``` *jsonb* column is populated
-with a **properties** field when a client uploads an SDF collection to the
-service. We need this information for the autocomplete feature of our
-web client, and **properties** are dispatched along with the rest of the
-library information:
-
-```
-$ curl -s localhost:5000/v1/libraries/9ca31f91-2f5a-4c01-adb4-4fc137ff8a4f
-{
- "service_data" : {
- "properties" : [
- "P450_3A4_CSL_Uncertainty",
- "IUPAC_NAME",
- "STATUS_Probability",
- # long list of possible property names within library
- ...
- ],
- "updated_timestamp" : 1446742469199,
- "created_timestamp" : 1446742437871,
- "structures_count" : 108,
- "name" : "My library"
- },
- "metadata" : {
- "comment" : "Some notes."
- }
-}
-```
-
-However, it seems impractical to send these specific details every time a
-client requests general library information, and, on the whole,
-```service_data``` seems like the wrong place to store SDF properties.
-
-A new *jsonb* ```library_metadata.index_data``` column would improve data
-decoupling.
-
-# Detailed design
-
-The database schema should be updated to add the new ```index_data``` column
-to the ```library_metadata``` table. At its simplest, this column would hold
-the set of unique properties for every library table:
-
-```
-{
- "properties" : [
- "P450_3A4_CSL_Uncertainty",
- "IUPAC_NAME",
- "STATUS_Probability",
- # long list of possible property names within library
- ...
- ]
-}
-```
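-
-A minimal migration sketch (assuming a plain ```ALTER TABLE``` is sufficient; the exact migration mechanism is not prescribed by this RFC):
-
-```
-alter table library_metadata add column index_data jsonb;
-```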
-
-The change should be transparent for the current API (and hence, possible
-consumers).
-
-# Further considerations
-
-A separate change to the API should be made to introduce a new method for
-retrieving data from the new column. That would allow removing **properties**
-from the ```GET /libraries/{library_id}``` response. That change is out of
-scope for this RFC, however.
diff --git a/utils/indigo-service/doc/rfcs/text/010-docker-compose-env.md b/utils/indigo-service/doc/rfcs/text/010-docker-compose-env.md
deleted file mode 100644
index f4928fc2f5..0000000000
--- a/utils/indigo-service/doc/rfcs/text/010-docker-compose-env.md
+++ /dev/null
@@ -1,92 +0,0 @@
-- Start Date: 2015-01-14
-- Implementation MR: !10
-- Source issue: #36
-- Status:
-- Scope: infrastructure
-
-# Summary
-
-* use docker-compose instead of direct docker containers
-* minimize image sizes
-* split the nginx and flask services
-* remove Java
-* use Artifactory to keep binaries
-
-# Motivation
-
-Right now there are a lot of unwieldy commands needed to run the containers: we have to remember ports, volumes, etc.
-The project is getting bigger and bigger, and we need to minimize compilation time and add new services (Imago) without complicating the workflow.
-
-
-# Detailed design
-
-The general approach is to use the `docker-compose` utility:
-
-https://docs.docker.com/compose/
-
-You can install docker-compose as a Python module:
-
-https://docs.docker.com/compose/install/
-
-```
-docker-compose $(CMD)
-```
-
-Or, if you have just Docker installed, use the following script (downloaded from the link above):
-
-```
-./run_dc.sh $(CMD)
-```
-
-## Command line
-
-### Download all artifacts
-
-```
-cd ./lib/ && source download.sh
-```
-
-### Build images
-```
-docker-compose build
-```
-
-### Run containers (detached)
-```
-docker-compose up -d
-```
-
-### Stop containers
-```
-docker-compose stop
-docker-compose stop $(NAME)
-```
-### Remove containers
-
-```
-docker-compose rm -f
-```
-
-
-# Unresolved questions
-
-* How to remove all links between containers?
-* How to build binaries into Artifactory?
-* How to "lock" the Python environment?
-
-# Things to do after merge
-
-* Update README
-* Remove files
- * db.Dockerfile
- * env.Dockerfile
- * Dockerfile
- * upload/
- * third_party/indigo
- * lib/*.zip
-* Create compose files for DEV and TEST. [Details](https://docs.docker.com/compose/extends/#example-use-case)
-* Update python libs for debian:jessie
-* ...
-
-
-
diff --git a/utils/indigo-service/doc/swagger.yaml b/utils/indigo-service/doc/swagger.yaml
deleted file mode 100644
index 6cd1773028..0000000000
--- a/utils/indigo-service/doc/swagger.yaml
+++ /dev/null
@@ -1,264 +0,0 @@
----
-swagger: '2.0'
-basePath: /v1
-info:
- title: 'Indigo REST'
- version: '1.0'
-produces:
- - application/json
-tags:
- - name: library
- description: 'Methods to manipulate your libraries'
- - name: search
- description: 'Methods for search queries'
- - name: ketcher
- description: 'Methods for ketcher editor'
- - name: misc
- description: 'Various other methods'
-paths:
- /libraries:
- get:
- description: 'Returns a library list'
- responses:
- 200:
- description: 'A list of available libraries'
- schema:
- type: object
- tags:
- - library
- post:
- description: 'Creates a new library'
- consumes:
- - application/json
- parameters:
- - name: body
- in: body
- description: 'Library data payload (name and optional extra data)'
- required: true
- responses:
- 201:
- description: 'Result object'
- schema:
- type: object
- tags:
- - library
- /libraries/{library_id}:
- get:
- description: 'Returns a library based on ID'
- parameters:
- - name: library_id
- in: path
- description: 'ID of a library to get'
- required: true
- type: string
- responses:
- 200:
- description: 'Library object'
- schema:
- type: object
- 404:
- description: 'Library not found'
- tags:
- - library
- put:
- description: 'Updates library data'
- consumes:
- - application/json
- parameters:
- - name: library_id
- in: path
- description: 'ID of a library to update'
- required: true
- type: string
- - name: body
- in: body
- description: 'New library data'
- required: true
- responses:
- 200:
- description: 'Result object'
- schema:
- type: object
- 404:
- description: 'Library not found'
- tags:
- - library
- delete:
- description: 'Removes a library'
- parameters:
- - name: library_id
- in: path
- description: 'ID of a library to delete'
- required: true
- type: string
- responses:
- 200:
- description: 'Result object'
- schema:
- type: object
- 404:
- description: 'Library not found'
- tags:
- - library
- /libraries/{library_id}/uploads:
- post:
- description: 'Uploads data to a selected library'
- consumes:
- - chemical/x-mdl-sdfile
- - application/x-gzip
- - application/gzip
- parameters:
- - name: library_id
- in: path
- description: 'ID of a target library'
- required: true
- type: string
- - name: body
- in: body
- description: 'Library contents payload'
- required: true
- responses:
- 200:
- description: 'Result object'
- schema:
- type: object
- 404:
- description: 'Library not found'
- 415:
- description: 'Unsupported Media Type'
- tags:
- - library
- /libraries/{library_id}/uploads/{upload_id}:
- get:
- description: 'Checks the upload status for the specified upload_id'
- parameters:
- - name: library_id
- in: path
- description: 'ID of a target library'
- required: true
- type: string
- - name: upload_id
- in: path
- description: 'ID of an upload task'
- required: true
- type: string
- responses:
- 200:
- description: 'Result object'
- schema:
- type: object
- tags:
- - misc
- /render:
- post:
- description: 'Returns a molecule image'
- consumes:
- - application/json
- produces:
- - image/png
- - image/svg+xml
- - application/pdf
- - image/png;base64
- parameters:
- - name: body
- in: body
- description: 'A payload describing a structure and render options'
- required: true
- responses:
- 200:
- description: 'A rendered molecule image'
- schema:
- type: file
- 400:
- description: 'A problem with supplied client data'
- tags:
- - misc
- /info:
- get:
- description: 'Provides information about the service'
- responses:
- 200:
- description: 'Information on various parts of the service'
- schema:
- type: object
- tags:
- - misc
- /search:
- post:
- description: 'Performs a search over specified libraries'
- produces:
- - application/json
- parameters:
- - name: body
- in: body
- description: 'A payload describing search parameters'
- required: true
- responses:
- 200:
- description: 'A list of found items'
- # TODO describe a response schema
- schema:
- type: object
- tags:
- - search
- /ketcher/knocknock:
- get:
- description: 'Returns whether Ketcher is available'
- produces:
- - text/plain
- tags:
- - ketcher
- /ketcher/aromatize:
- post:
- description: 'Performs molecule aromatization'
- produces:
- - text/plain
- parameters:
- - name: body
- in: body
- description: 'Molecular structure'
- required: true
- responses:
- 200:
- description: 'Aromatized structure'
- # TODO describe a response schema
- schema:
- type: object
- tags:
- - ketcher
- /ketcher/dearomatize:
- post:
- description: 'Performs molecule de-aromatization'
- produces:
- - text/plain
- parameters:
- - name: body
- in: body
- description: 'Molecular structure'
- required: true
- responses:
- 200:
- description: 'De-aromatized structure'
- # TODO describe a response schema
- schema:
- type: object
- tags:
- - ketcher
- /ketcher/calculate_cip:
- post:
- description: 'Performs CIP descriptor calculation'
- produces:
- - text/plain
- parameters:
- - name: body
- in: body
- description: 'Molecular structure'
- required: true
- responses:
- 200:
- description: 'Structure'
- # TODO describe a response schema
- schema:
- type: object
- tags:
- - ketcher
diff --git a/utils/indigo-service/docker-compose.yml b/utils/indigo-service/docker-compose.yml
index 704db98146..238989d0c8 100644
--- a/utils/indigo-service/docker-compose.yml
+++ b/utils/indigo-service/docker-compose.yml
@@ -1,67 +1,36 @@
-version: "2.1"
+version: "3.4"
+
services:
db:
build:
- context: ./
- dockerfile: ./db/Dockerfile
- networks:
- default:
- aliases:
- - indigo_db
+ context: ./db/
+ dockerfile: ./Dockerfile
+ env_file: ./env/db.env
+ ports:
+ - "5432:5432"
+ volumes:
+ - db_pgdata:/var/lib/postgresql/data
+ restart: always
- service:
+ backend:
build:
- context: ./
- dockerfile: ./service/Dockerfile
- environment:
- - PYTHONPATH=${INDIGO_SERVICE_PYTHONPATH:-/srv/indigo-python}
- volumes_from:
- - indigo_builder
- networks:
- default:
- aliases:
- - indigo_service
+ context: ./backend/
+ dockerfile: ./Dockerfile
+ env_file: ./env/db.env
+ depends_on:
+ - db
restart: always
command: supervisord -n
- front:
+ frontend:
build:
- context: ./
- dockerfile: ./nginx/Dockerfile
- links:
- - service
+ context: ./frontend/
+ dockerfile: ./Dockerfile
+ depends_on:
+ - backend
ports:
- - "${INDIGO_SERVICE_PORT:-8080}:80"
+ - "80:80"
restart: always
- indigo_builder:
- build:
- context: indigo-builder
- args:
- - ARTIFACTORY_API_KEY
- volumes:
- - /var/src
-
- test:
- build:
- context: ./
- dockerfile: ./service/test/Dockerfile
- environment:
- - INDIGO_SERVICE_URL
- - IGNORE_PATTERN
- command: python3 -u /srv/api/test/tests.py
-
-# debug:
-# build:
-# context: ./
-# dockerfile: ./service/Dockerfile
-# environment:
-# - PYTHONPATH=${INDIGO_SERVICE_PYTHONPATH:-/srv/indigo-python}
-# - INDIGO_UWSGI_RUN_PARAMETERS=--plugin python3 --py-autoreload=1
-# volumes:
-# - ./service/:/srv/api/
-# networks:
-# default:
-# aliases:
-# - indigo_service
-# command: supervisord -n
+volumes:
+ db_pgdata:
diff --git a/utils/indigo-service/env/db.env b/utils/indigo-service/env/db.env
new file mode 100644
index 0000000000..469e202528
--- /dev/null
+++ b/utils/indigo-service/env/db.env
@@ -0,0 +1 @@
+POSTGRES_PASSWORD=p@ssw0rd
diff --git a/utils/indigo-service/frontend/Dockerfile b/utils/indigo-service/frontend/Dockerfile
new file mode 100644
index 0000000000..0669eed2c0
--- /dev/null
+++ b/utils/indigo-service/frontend/Dockerfile
@@ -0,0 +1,39 @@
+FROM node:12-slim as ui-builder
+RUN apt update && \
+ apt upgrade -y
+COPY ./ui /ui
+WORKDIR /ui
+RUN npm install
+RUN npm run gulp archive
+
+FROM nginx:latest
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt update && \
+ apt upgrade -y && \
+ apt install -y --no-install-recommends \
+ unzip
+
+# Install UI
+COPY --from=ui-builder /ui/indigo-service-ui*.zip /opt/
+RUN cd /opt && \
+ unzip indigo-service-ui-*.zip && \
+ mkdir -p /var/www/ && \
+ mv indigo-service-ui*/ /var/www/ui/
+
+# Install Ketcher
+#ADD https://github.com/epam/ketcher/releases/download/v2.5.1/ketcher-remote-2.5.1.zip /opt/ketcher-remote-2.5.1.zip
+ADD https://github.com/epam/ketcher/releases/download/v2.1.0/ketcher-2.1.0.zip /opt/ketcher-2.1.0.zip
+RUN cd /opt && \
+ unzip ketcher*.zip && \
+ # mv remote/ /srv/ketcher/ && \
+ mv ketcher/ /srv/ketcher/ && \
+ ls /srv/ketcher && \
+ cp /srv/ketcher/ketcher.html /srv/ketcher/index.html
+
+COPY ./nginx/nginx.conf /etc/nginx/conf.d/default.conf
+
+# Clean
+RUN apt autoremove -y && \
+ rm -rf /opt/* /var/lib/apt/lists/*
diff --git a/utils/indigo-service/nginx/nginx.conf b/utils/indigo-service/frontend/nginx/nginx.conf
similarity index 85%
rename from utils/indigo-service/nginx/nginx.conf
rename to utils/indigo-service/frontend/nginx/nginx.conf
index 82e7556621..8f8377a23b 100644
--- a/utils/indigo-service/nginx/nginx.conf
+++ b/utils/indigo-service/frontend/nginx/nginx.conf
@@ -10,21 +10,15 @@ server {
send_timeout 600;
location / {
- root /var/www/client;
+ root /var/www/ui;
index index.html;
try_files $uri $uri/ @indigoservice;
}
- location /doc {
- root /var/www;
- index index.html;
- try_files $uri $uri/index.html $uri/;
- }
-
location /ketcher/ {
add_header 'Access-Control-Allow-Origin' '*';
root /srv;
- index ketcher.html;
+ index index.html;
try_files $uri $uri/ @indigoservice;
}
@@ -38,6 +32,6 @@ server {
add_header 'Access-Control-Allow-Headers' 'Accept, Content-Type' always;
add_header 'Access-Control-Max-Age' '86400' always;
include uwsgi_params;
- uwsgi_pass indigo_service:8002;
+ uwsgi_pass backend:8002;
}
}
diff --git a/utils/indigo-service-client/.gitignore b/utils/indigo-service/frontend/ui/.gitignore
similarity index 100%
rename from utils/indigo-service-client/.gitignore
rename to utils/indigo-service/frontend/ui/.gitignore
diff --git a/utils/indigo-service-client/README.md b/utils/indigo-service/frontend/ui/README.md
similarity index 62%
rename from utils/indigo-service-client/README.md
rename to utils/indigo-service/frontend/ui/README.md
index c559bfaf1d..fcad106f2b 100644
--- a/utils/indigo-service-client/README.md
+++ b/utils/indigo-service/frontend/ui/README.md
@@ -1,15 +1,13 @@
# EPAM Indigo projects #
-Copyright (c) 2009-2016 EPAM Systems
-GNU General Public License version 3
+Copyright (c) 2009-2022 EPAM Systems
## Introduction ##
-This repository includes web client for [indigo-service](https://git.epam.com/epm-lsop/indigo-service) project
+This repository includes web UI for Indigo service
## Source code organization ##
-
## Build instructions ##
```
diff --git a/utils/indigo-service-client/gulpfile.js b/utils/indigo-service/frontend/ui/gulpfile.js
similarity index 85%
rename from utils/indigo-service-client/gulpfile.js
rename to utils/indigo-service/frontend/ui/gulpfile.js
index fee8e3210a..c01ee66302 100644
--- a/utils/indigo-service-client/gulpfile.js
+++ b/utils/indigo-service/frontend/ui/gulpfile.js
@@ -1,5 +1,5 @@
var gulp = require('gulp-v3');
-var gutil = require('gulp-util');
+var log = require('fancy-log');
var plugins = require('gulp-load-plugins')();
var browserify = require('browserify');
@@ -25,17 +25,17 @@ var options = minimist(process.argv.slice(2), {
var polyfills = ['array.prototype.findindex',
'object.assign/dist/browser.js'];
-gulp.task('script', ['patch-version'], function() {
+gulp.task('script', [], function() {
return scriptBundle('script/index.js')
// Don't transform, see: http://git.io/vcJlV
.pipe(source('index.js')).pipe(buffer())
.pipe(plugins.sourcemaps.init({ loadMaps: true }))
- .pipe(plugins.uglify())
+ // .pipe(plugins.uglify())
.pipe(plugins.sourcemaps.write('./'))
.pipe(gulp.dest('dist'));
});
-gulp.task('script-watch', ['patch-version'], function () {
+gulp.task('script-watch', [], function () {
return scriptBundle('script/index.js', function (bundle) {
return bundle.pipe(source('index.js'))
.pipe(gulp.dest('./dist'))
@@ -58,7 +58,7 @@ gulp.task('style', function () {
gulp.task('mithril', function () {
var basename = 'mithril',
path = require.resolve(basename);
- return gulp.src(path.replace('.js', '.min.js'))
+ return gulp.src(path)
.pipe(plugins.rename({ basename: basename }))
.pipe(gulp.dest('dist'));
});
@@ -70,7 +70,7 @@ gulp.task('codemirror', function () {
.require('codemirror').bundle()
.pipe(source('codemirror.js')).pipe(buffer())
.pipe(plugins.sourcemaps.init({ loadMaps: true }))
- .pipe(plugins.uglify())
+ // .pipe(plugins.uglify())
.pipe(plugins.sourcemaps.write('./'))
.pipe(gulp.dest('dist'));
});
@@ -86,7 +86,8 @@ gulp.task('clean', function () {
});
gulp.task('archive', ['assets', 'code'], function (cb) {
- return gulp.src(['dist/**', '!**/*.map'])
+ // return gulp.src(['dist/**', '!**/*.map'])
+ return gulp.src(['dist/**'])
.pipe(plugins.rename({ dirname: pkg.name }))
.pipe(plugins.zip(pkg.name + '-' + pkg.version + '.zip'))
.pipe(gulp.dest('.'));
@@ -119,21 +120,6 @@ gulp.task('serve', ['assets', 'style', 'script-watch'], function() {
});
});
-gulp.task('patch-version', function (cb) {
- if (pkg.rev)
- return cb();
- cp.exec('git rev-list ' + pkg.version + '..HEAD --count', function (err, stdout, _) {
- if (err && err.code != 127) // not "command not found"
- gutil.log('Could not fetch revision. ' +
- 'Please git tag the package version.');
- else if (!err && stdout > 0) {
- pkg.rev = stdout.toString().trim();
- pkg.version += ('+r' + pkg.rev);
- }
- cb(err);
- });
-});
-
function scriptBundle(src, watchUpdate) {
var build = browserify(src, {
cache: {}, packageCache: {},
@@ -169,11 +155,11 @@ function scriptBundle(src, watchUpdate) {
var rebuild = function () {
return watchUpdate(build.bundle().on('error', function (err) {
- gutil.log(err.message);
+ log(err.message);
}));
};
build.plugin(watchify);
- build.on('log', gutil.log.bind(null, 'Script update:'));
+ build.on('log', log.bind(null, 'Script update:'));
build.on('update', rebuild);
return rebuild();
}
diff --git a/utils/indigo-service-client/index.html b/utils/indigo-service/frontend/ui/index.html
similarity index 100%
rename from utils/indigo-service-client/index.html
rename to utils/indigo-service/frontend/ui/index.html
diff --git a/utils/indigo-service-client/package.json b/utils/indigo-service/frontend/ui/package.json
similarity index 78%
rename from utils/indigo-service-client/package.json
rename to utils/indigo-service/frontend/ui/package.json
index f09f6aa5e1..ac7cda296d 100644
--- a/utils/indigo-service-client/package.json
+++ b/utils/indigo-service/frontend/ui/package.json
@@ -1,15 +1,15 @@
{
- "name": "indigo-service-client",
- "version": "0.3.0",
- "description": "Demo client for indigo-service",
- "repository": "https://git.epam.com/epm-lsop/indigo-service-client.git",
+ "name": "indigo-service-ui",
+ "version": "1.7.4",
+ "description": "UI for Indigo REST service",
+ "repository": "https://github.com/epam/Indigo",
"keywords": [
"indigo",
"web",
"service"
],
"author": "EPAM Systems",
- "license": "WTFPL",
+ "license": "Apache-2.0",
"devDependencies": {
"browser-sync": "^2.10.0",
"browserify": "^12.0.1",
@@ -24,7 +24,7 @@
"gulp-rename": "^1.2.2",
"gulp-sourcemaps": "^1.6.0",
"gulp-uglify": "^1.5.1",
- "gulp-util": "^3.0.7",
+ "fancy-log": "^2.0.0",
"gulp-zip": "^3.2.0",
"http-proxy": "^1.13.2",
"minimist": "^1.2.0",
@@ -37,8 +37,8 @@
"array.prototype.findindex": "^1.0.0",
"bootstrap": "^3.3.6",
"codemirror": "^5.9.0",
- "mithril": "^2.0.4",
- "murmurhash": "0.0.2",
+ "mithril": "^0.2.8",
+ "murmurhash": "^0.0.2",
"object.assign": "^4.0.3",
"query-string": "^3.0.0"
},
diff --git a/utils/indigo-service-client/script/api.js b/utils/indigo-service/frontend/ui/script/api.js
similarity index 95%
rename from utils/indigo-service-client/script/api.js
rename to utils/indigo-service/frontend/ui/script/api.js
index 32cc888da4..73bab23397 100644
--- a/utils/indigo-service-client/script/api.js
+++ b/utils/indigo-service/frontend/ui/script/api.js
@@ -93,9 +93,7 @@ function api(base) {
sdfExport: apiCall('GET', 'libraries/search/:search_id.sdf'),
render: apiCall('POST', 'indigo/render', {
deserialize: function(data) { return data; }
- }),
- imagoUpload: apiCall('POST', 'imago/uploads'),
- imagoUploadStatus: apiCall('GET', 'imago/uploads/:upload')
+ })
};
}
diff --git a/utils/indigo-service-client/script/echo.js b/utils/indigo-service/frontend/ui/script/echo.js
similarity index 100%
rename from utils/indigo-service-client/script/echo.js
rename to utils/indigo-service/frontend/ui/script/echo.js
diff --git a/utils/indigo-service/frontend/ui/script/index.js b/utils/indigo-service/frontend/ui/script/index.js
new file mode 100644
index 0000000000..f5e710fada
--- /dev/null
+++ b/utils/indigo-service/frontend/ui/script/index.js
@@ -0,0 +1,72 @@
+var m = require('mithril');
+var qs = require('query-string');
+
+var searchView = require('./search');
+var libsView = require('./libs');
+var api = require('./api');
+
+var app = {
+ version: '__VERSION__',
+ api_path: '__API_PATH__',
+ libs: [],
+ pages: [{
+ url: '/search',
+ view: searchView,
+ title: 'Search'
+ }, {
+ url: '/libs',
+ view: libsView,
+ title: 'Libraries'
+ }]
+};
+
+app.view = function (page) {
+ console.info('redraw', page.url.slice(1));
+ return [
+ m('nav', [
+ m('h1', 'Indigo Online'),
+ m('ul', app.pages.map(function (pg) {
+ return m('li', {'class': page.url == pg.url ? 'active' : ''},
+ m('a', {href: pg.url, config: m.route}, pg.title));
+ }))
+ ]),
+ m('main', {'class': page.url.slice(1)}, [
+ m('iframe', {
+ src: '/ketcher/?api_path=/v2',
+ onload: function () {
+ app.ketcher = this.contentWindow.ketcher;
+ }
+ }),
+ page.view(app)
+ ])
+ ];
+};
+
+//initialize
+window.onload = function () {
+ //document.title += ' v' + app.version;
+ var opts = qs.parse(location.search);
+ app.api_path = opts.api_path || app.api_path;
+ app.server = api(app.api_path);
+
+ app.server.libList().then(function (res) {
+ res.forEach(function (lib) {
+ lib.info = app.server.libInfo({id: lib.id});
+ });
+ res.sort(function (a, b) {
+ return b.created_timestamp - a.created_timestamp;
+ });
+ app.libs = res;
+ });
+
+ m.route.mode = "hash";
+ m.route(document.body, '/search', app.pages.reduce(function (res, page) {
+ res[page.url] = {
+ view: app.view.bind(app, page),
+ controller: function () {
+ m.redraw.strategy('diff');
+ }
+ };
+ return res;
+ }, {}));
+};
diff --git a/utils/indigo-service-client/script/libs.js b/utils/indigo-service/frontend/ui/script/libs.js
similarity index 100%
rename from utils/indigo-service-client/script/libs.js
rename to utils/indigo-service/frontend/ui/script/libs.js
diff --git a/utils/indigo-service-client/script/output.js b/utils/indigo-service/frontend/ui/script/output.js
similarity index 100%
rename from utils/indigo-service-client/script/output.js
rename to utils/indigo-service/frontend/ui/script/output.js
diff --git a/utils/indigo-service-client/script/propedit.js b/utils/indigo-service/frontend/ui/script/propedit.js
similarity index 100%
rename from utils/indigo-service-client/script/propedit.js
rename to utils/indigo-service/frontend/ui/script/propedit.js
diff --git a/utils/indigo-service-client/script/search.js b/utils/indigo-service/frontend/ui/script/search.js
similarity index 96%
rename from utils/indigo-service-client/script/search.js
rename to utils/indigo-service/frontend/ui/script/search.js
index fabd9fcfe7..17910b99f4 100644
--- a/utils/indigo-service-client/script/search.js
+++ b/utils/indigo-service/frontend/ui/script/search.js
@@ -153,6 +153,11 @@ var selectView = function(attrs, prop) {
function submit(ketcher, event) {
var molfile = ketcher.getMolfile(),
isEmpty = molfile.split('\n').length <= 6;
+// var molfile = "";
+// ketcher.getMolfile().then(m => {
+// molfile = m;
+// });
+// var isEmpty = molfile.split('\n').length <= 6;
query = JSON.parse(JSON.stringify(request)); // poor man's clone
query.query_structure = !isEmpty ? molfile : undefined;
result('search');
diff --git a/utils/indigo-service-client/script/x.js b/utils/indigo-service/frontend/ui/script/x.js
similarity index 100%
rename from utils/indigo-service-client/script/x.js
rename to utils/indigo-service/frontend/ui/script/x.js
diff --git a/utils/indigo-service-client/style/index.less b/utils/indigo-service/frontend/ui/style/index.less
similarity index 97%
rename from utils/indigo-service-client/style/index.less
rename to utils/indigo-service/frontend/ui/style/index.less
index fdcb29db8c..a24d30f505 100644
--- a/utils/indigo-service-client/style/index.less
+++ b/utils/indigo-service/frontend/ui/style/index.less
@@ -87,6 +87,3 @@ table.libs {
@import 'output';
}
-.imago {
- @import 'imago';
-}
diff --git a/utils/indigo-service-client/style/libs.less b/utils/indigo-service/frontend/ui/style/libs.less
similarity index 100%
rename from utils/indigo-service-client/style/libs.less
rename to utils/indigo-service/frontend/ui/style/libs.less
diff --git a/utils/indigo-service-client/style/output.less b/utils/indigo-service/frontend/ui/style/output.less
similarity index 100%
rename from utils/indigo-service-client/style/output.less
rename to utils/indigo-service/frontend/ui/style/output.less
diff --git a/utils/indigo-service-client/style/propedit.less b/utils/indigo-service/frontend/ui/style/propedit.less
similarity index 100%
rename from utils/indigo-service-client/style/propedit.less
rename to utils/indigo-service/frontend/ui/style/propedit.less
diff --git a/utils/indigo-service-client/style/search.less b/utils/indigo-service/frontend/ui/style/search.less
similarity index 100%
rename from utils/indigo-service-client/style/search.less
rename to utils/indigo-service/frontend/ui/style/search.less
diff --git a/utils/indigo-service-client/style/theme.less b/utils/indigo-service/frontend/ui/style/theme.less
similarity index 100%
rename from utils/indigo-service-client/style/theme.less
rename to utils/indigo-service/frontend/ui/style/theme.less
diff --git a/utils/indigo-service-client/style/variables.less b/utils/indigo-service/frontend/ui/style/variables.less
similarity index 100%
rename from utils/indigo-service-client/style/variables.less
rename to utils/indigo-service/frontend/ui/style/variables.less
diff --git a/utils/indigo-service/indigo-builder/Dockerfile b/utils/indigo-service/indigo-builder/Dockerfile
deleted file mode 100644
index 12fe0f084f..0000000000
--- a/utils/indigo-service/indigo-builder/Dockerfile
+++ /dev/null
@@ -1,15 +0,0 @@
-FROM debian:stretch
-ENV DEBIAN_FRONTEND noninteractive
-ARG ARTIFACTORY_API_KEY
-
-RUN apt-get update -qq && \
- apt-get install -qq -y --no-install-recommends curl ca-certificates unzip
-
-RUN cd /opt && \
- curl -OL -H "X-JFrog-Art-Api:$ARTIFACTORY_API_KEY" -X GET "https://artifactory.epam.com/artifactory/EPM-LSOP/indigo/ci/indigo-python-latest-linux.zip" && \
- unzip indigo-python-* -d dist && \
- mkdir -p /var/src/Indigo/build/ && \
- mv dist/indigo-python-* /var/src/Indigo/build/indigo-python
-
-RUN apt-get purge -qq -y curl ca-certificates unzip
-
diff --git a/utils/indigo-service/lib/README.md b/utils/indigo-service/lib/README.md
deleted file mode 100644
index 622991fd47..0000000000
--- a/utils/indigo-service/lib/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-Download the latest versions of the Bingo, Ketcher, and Imago libraries.
-The configuration was tested with:
-
-```
-bingo-postgres9.6-1.8.0.dev2.r5-linux64.7z
-imago-console-v2.1-dev1-0-ge78b2cb-linux64.7z
-imago-console-v10.1-dev1-1-ge78b2cb-linux64.7z
-indigo-service-client-0.3.0+r50.zip
-indigo-uploader-0.1.1-linux64.zip
-ketcher-2.0.0.zip
-```
diff --git a/utils/indigo-service/lib/favicon.ico b/utils/indigo-service/lib/favicon.ico
deleted file mode 100644
index 125d4c9a5a..0000000000
Binary files a/utils/indigo-service/lib/favicon.ico and /dev/null differ
diff --git a/utils/indigo-service/nginx/Dockerfile b/utils/indigo-service/nginx/Dockerfile
deleted file mode 100644
index 7c0f10a16e..0000000000
--- a/utils/indigo-service/nginx/Dockerfile
+++ /dev/null
@@ -1,33 +0,0 @@
-FROM library/nginx:latest
-
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN apt-get update -qq && apt-get install -y --no-install-recommends unzip
-
-# Install swagger
-COPY ./lib/swagger-ui-*.zip /opt/
-RUN cd /opt && \
- unzip swagger-ui-* && \
- mkdir -p /var/www && \
- mv doc/ /var/www/
-
-# Install client
-COPY ./lib/indigo-service-client-*.zip /opt/
-RUN cd /opt && \
- unzip indigo-service-client-* && \
- mkdir -p /var/www/ && \
- mv indigo-service-client*/ /var/www/client/
-
-# Install Ketcher
-COPY ./lib/ketcher*.zip /opt/
-RUN cd /opt && \
- unzip ketcher* && \
- mv ketcher*/ /srv/ketcher/
-
-COPY ./lib/favicon.ico /var/www/client/
-COPY ./nginx/nginx.conf /etc/nginx/conf.d/default.conf
-
-# Clean
-RUN apt-get autoremove -y && rm -rf /opt/* /var/lib/apt/lists/*
-
-
diff --git a/utils/indigo-service/service/Dockerfile b/utils/indigo-service/service/Dockerfile
deleted file mode 100644
index 30075530c2..0000000000
--- a/utils/indigo-service/service/Dockerfile
+++ /dev/null
@@ -1,66 +0,0 @@
-FROM ubuntu:20.04
-
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN apt-get update && apt-get upgrade -y
-
-RUN apt-get install -y --no-install-recommends \
- unzip python3 python3-pip python3-wheel python3-setuptools libfreetype6-dev libfontconfig1-dev
-
-# Write service version into file
-# COPY ./ /opt/git/
-# RUN cd /opt/git && \
-# git describe --tags >> /srv/service_version && \
-# find ./lib -name "imago-console-*" -exec basename -a {} + >> /srv/service_version
-
-# Install python dependencies using pip
-COPY ./service/requirements.txt /opt/
-RUN pip3 install --no-cache-dir -r /opt/requirements.txt
-
-# Install Celery
-# COPY ./celery/celeryd.conf /etc/default/celeryd
-# RUN useradd -ms /bin/bash celery || echo "User already exists."
-# RUN chmod 640 /etc/default/celeryd
-# COPY ./celery/celery.auto.conf /etc/supervisor/conf.d/
-
-# Install redis runner
-# COPY ./celery/redis.auto.conf /etc/supervisor/conf.d/
-
-# Install uwsgi runner
-# COPY ./uwsgi/uwsgi.ini /etc/uwsgi.ini
-# COPY ./uwsgi/uwsgi.auto.conf /etc/supervisor/conf.d/
-# ENV INDIGO_UWSGI_RUN_PARAMETERS --plugin python3
-
-# Install Indigo
-COPY ./lib/*indigo*linux*.whl /opt/
-RUN python3 -m pip install /opt/*indigo*manylinux1_x86_64*.whl
-
-# # Install Indigo Uploader
-# COPY ./lib/indigo-uploader-*.zip /opt/
-# RUN cd /opt && \
-# unzip indigo-uploader-* && \
-# mv indigo-uploader*/ /srv/uploader/
-#
-# COPY ./db/uploader_config.yml /srv/uploader/
-# RUN mkdir -p /tmp/indigo-service/upload
-# RUN chmod -R a+rw /tmp/indigo-service/
-#
-# # Install Imago
-# COPY ./lib/imago-console-*.7z /opt/
-# RUN cd /opt && \
-# mkdir /srv/imago && \
-# for imago_zip in $(cat /srv/service_version); do case "$imago_zip" in *imago*) 7z x $imago_zip; mv imago-console*/ /srv/imago/$(basename $imago_zip .7z)/ ;; esac; done
-# RUN chmod -R a+rx /srv/imago
-
-COPY ./lib/favicon.ico /srv/api/client/
-COPY ./service/v2/ /srv/api/v2/
-COPY ./service/*.py /srv/api/
-
-# Clean
-RUN apt-get purge -y unzip git python3-pip python3-wheel && \
- apt-get autoremove -y && \
- rm -rf /opt/* /var/lib/apt/lists/*
-
-EXPOSE 80
-WORKDIR /srv/api
-CMD gunicorn --bind 0.0.0.0:80 --workers=5 app:app
diff --git a/utils/indigo-service/service/requirements.txt b/utils/indigo-service/service/requirements.txt
deleted file mode 100644
index acfd0ad5f7..0000000000
--- a/utils/indigo-service/service/requirements.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-Flask
-# Flask-HTTPAuth
-# SQLAlchemy
-flask-restful
-flasgger
-marshmallow
-gunicorn
-# pyparsing
-# celery
-# setuptools
-# vine
-
diff --git a/utils/indigo-service/uwsgi/uwsgi.auto.conf b/utils/indigo-service/uwsgi/uwsgi.auto.conf
deleted file mode 100644
index 4b6bcd8217..0000000000
--- a/utils/indigo-service/uwsgi/uwsgi.auto.conf
+++ /dev/null
@@ -1,13 +0,0 @@
-[program:uwsgi]
-directory=/srv/api
-
-command=uwsgi %(ENV_INDIGO_UWSGI_RUN_PARAMETERS)s /etc/uwsgi.ini
-autostart=true
-autorestart=true
-#stdout_logfile=/var/log/uwsgi/stdout.log
-#stderr_logfile=/var/log/uwsgi/stderr.log
-
-redirect_stderr=true
-stdout_logfile=/dev/stdout
-stdout_logfile_maxbytes=0
-killasgroup=true
\ No newline at end of file