\ No newline at end of file
diff --git a/DEPENDENCIES b/DEPENDENCIES
deleted file mode 100644
index a2c84014..00000000
--- a/DEPENDENCIES
+++ /dev/null
@@ -1,4 +0,0 @@
-kb_sdk
-auth
-kbapi_common
-narrative_method_store
diff --git a/Dockerfile b/Dockerfile
index f45f312d..5768ed83 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,25 +1,43 @@
-FROM kbase/sdkbase2:python AS build
+FROM python:3.9.19 AS build
+# The rsync installation is required for the Makefile
+RUN apt-get update && apt-get install -y rsync
+RUN mkdir -p /kb/deployment/lib/biokbase
COPY . /tmp/catalog
RUN cd /tmp/catalog && make deploy-service deploy-server-control-scripts
-FROM kbase/sdkbase2:python
+FROM python:3.9.19
# These ARGs values are passed in via the docker build command
ARG BUILD_DATE
ARG VCS_REF
ARG BRANCH
+RUN apt-get update && apt-get install -y wget
+
+# install dockerize
+WORKDIR /opt
+RUN wget -q https://github.com/kbase/dockerize/raw/master/dockerize-linux-amd64-v0.6.1.tar.gz \
+ && tar xvzf dockerize-linux-amd64-v0.6.1.tar.gz \
+ && rm dockerize-linux-amd64-v0.6.1.tar.gz
+RUN mkdir -p /kb/deployment/bin/
+RUN ln -s /opt/dockerize /kb/deployment/bin/dockerize
+
ENV KB_DEPLOYMENT_CONFIG "/kb/deployment/conf/deploy.cfg"
COPY --from=build /kb/deployment/lib/biokbase /kb/deployment/lib/biokbase
COPY --from=build /kb/deployment/services /kb/deployment/services
COPY --from=build /tmp/catalog/deployment/conf /kb/deployment/conf
-SHELL ["/bin/bash", "-c"]
-COPY requirements.txt requirements.txt
-RUN source activate root && \
- pip install -r requirements.txt
+WORKDIR /tmp/catalog
+
+# install pipenv
+RUN pip install --upgrade pip && \
+ pip install pipenv
+
+# install deps
+COPY Pipfile* ./
+RUN pipenv sync --system
LABEL org.label-schema.build-date=$BUILD_DATE \
org.label-schema.vcs-url="https://github.com/kbase/catalog.git" \
diff --git a/KIDLspec.css b/KIDLspec.css
new file mode 100644
index 00000000..4d2a3e3a
--- /dev/null
+++ b/KIDLspec.css
@@ -0,0 +1,65 @@
+html, body {
+ height: 100%;
+}
+html {
+ display: table;
+ margin: auto;
+}
+body {
+ background-color: white;
+ color: #000;
+ font-family: Menlo, Monaco, Consolas, "Courier New", monospace;
+ font-weight: normal;
+ font-size: 12px;
+ margin: 0;
+ padding: 20px;
+ display: table-cell;
+ vertical-align: middle;
+}
+span.space {
+ display: inline-block;
+ width: 7px;
+}
+span.tab {
+ display: inline-block;
+ width: 30px;
+}
+span.keyword {
+ font-weight: bold;
+ color: #008;
+}
+span.name {
+ color: #000 !important;
+}
+span.deprecated {
+ text-decoration: line-through;
+}
+span.annotation {
+ color: #303030;
+}
+span.primitive {
+ font-weight: bold;
+ color: #066;
+}
+div.body {
+ background-color: #ffffff;
+ color: #3e4349;
+ padding: 0 30px;
+}
+div.comment {
+ color: #A0A0A0;
+}
+a {
+ color: #004b6b;
+ text-decoration: none;
+}
+a:hover {
+ color: #6d4100;
+ text-decoration: underline;
+}
+:target {
+ background-color: #ffa;
+}
+div.body p, div.body dd, div.body li {
+ line-height: 1.4em;
+}
diff --git a/Makefile b/Makefile
index 6be1ab4d..ce536ce5 100644
--- a/Makefile
+++ b/Makefile
@@ -5,7 +5,6 @@ SERVICE_CAPS = Catalog
#$(shell perl server_scripts/get_deploy_cfg.pm $(SERVICE_CAPS).port)
SERVICE_PORT = 5000
SPEC_FILE = catalog.spec
-URL = https://kbase.us/services/catalog/rpc
#End of user defined variables
@@ -27,25 +26,20 @@ LIB_DIR = lib
PATH := kb_sdk/bin:$(PATH)
-default: init
-
-init:
- git submodule init
- git submodule update
compile-kb-module:
+ kb-sdk compile $(SPEC_FILE) \
+ --out . \
+ --html
kb-sdk compile $(SPEC_FILE) \
--out $(LIB_DIR) \
- --plclname Bio::KBase::$(SERVICE_CAPS)::Client \
- --jsclname javascript/Client \
--pyclname biokbase.$(SERVICE).Client \
- --javasrc java \
- --java \
--pysrvname biokbase.$(SERVICE).Server \
- --pyimplname biokbase.$(SERVICE).Impl;
- touch $(LIB_DIR)/biokbase/__init__.py
- touch $(LIB_DIR)/biokbase/$(SERVICE)/__init__.py
-
+ --pyimplname biokbase.$(SERVICE).Impl
+ kb-sdk compile $(SPEC_FILE) \
+ --out . \
+ --java \
+ --javasrc src/main/java
# start/stop the service running out of THIS directory
build-local-server-control-scripts:
@@ -102,10 +96,6 @@ setup-tests:
mkdir -p $(TESTLIB)/biokbase
mkdir -p $(TESTDIR)/nms
rsync -av lib/biokbase/* $(TESTLIB)/biokbase/. --exclude *.bak-*
- rsync -av kbapi_common/lib/biokbase/* $(TESTLIB)/biokbase/.
- cd narrative_method_store; make; make build-classpath-list;
-# rsync -av narrative_method_store/lib/biokbase/* $(TESTLIB)/biokbase/.
-
test: setup-tests
diff --git a/Pipfile b/Pipfile
new file mode 100644
index 00000000..90bcf0c7
--- /dev/null
+++ b/Pipfile
@@ -0,0 +1,18 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+docker = "==7.1.0"
+jsonrpcbase = "==0.2.0"
+pymongo = "==4.7.2"
+pyyaml = "==6.0.2"
+semantic-version = "==2.10.0"
+uwsgi = "==2.0.22"
+
+[dev-packages]
+coverage = "==7.6.1"
+
+[requires]
+python_version = "3.9.19"
diff --git a/Pipfile.lock b/Pipfile.lock
new file mode 100644
index 00000000..23d4a415
--- /dev/null
+++ b/Pipfile.lock
@@ -0,0 +1,406 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "e69e1bd51bda138e86072b2ad7a04f6a8afcd5d8c701a878db6cec814ff595b0"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.9.19"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651",
+ "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"
+ ],
+ "markers": "python_version >= '3.6'",
+ "version": "==2025.1.31"
+ },
+ "charset-normalizer": {
+ "hashes": [
+ "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537",
+ "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa",
+ "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a",
+ "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294",
+ "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b",
+ "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd",
+ "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601",
+ "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd",
+ "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4",
+ "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d",
+ "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2",
+ "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313",
+ "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd",
+ "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa",
+ "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8",
+ "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1",
+ "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2",
+ "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496",
+ "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d",
+ "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b",
+ "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e",
+ "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a",
+ "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4",
+ "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca",
+ "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78",
+ "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408",
+ "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5",
+ "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3",
+ "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f",
+ "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a",
+ "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765",
+ "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6",
+ "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146",
+ "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6",
+ "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9",
+ "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd",
+ "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c",
+ "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f",
+ "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545",
+ "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176",
+ "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770",
+ "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824",
+ "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f",
+ "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf",
+ "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487",
+ "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d",
+ "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd",
+ "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b",
+ "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534",
+ "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f",
+ "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b",
+ "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9",
+ "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd",
+ "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125",
+ "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9",
+ "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de",
+ "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11",
+ "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d",
+ "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35",
+ "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f",
+ "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda",
+ "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7",
+ "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a",
+ "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971",
+ "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8",
+ "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41",
+ "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d",
+ "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f",
+ "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757",
+ "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a",
+ "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886",
+ "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77",
+ "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76",
+ "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247",
+ "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85",
+ "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb",
+ "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7",
+ "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e",
+ "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6",
+ "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037",
+ "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1",
+ "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e",
+ "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807",
+ "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407",
+ "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c",
+ "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12",
+ "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3",
+ "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089",
+ "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd",
+ "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e",
+ "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00",
+ "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"
+ ],
+ "markers": "python_version >= '3.7'",
+ "version": "==3.4.1"
+ },
+ "dnspython": {
+ "hashes": [
+ "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50",
+ "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==2.6.1"
+ },
+ "docker": {
+ "hashes": [
+ "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c",
+ "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.8'",
+ "version": "==7.1.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9",
+ "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"
+ ],
+ "markers": "python_version >= '3.6'",
+ "version": "==3.10"
+ },
+ "jsonrpcbase": {
+ "hashes": [
+ "sha256:7ea67fc1a7c87756e9a876e18a342e431e80d0ef3ba867dfd6f3fac5bf3fcc0d"
+ ],
+ "index": "pypi",
+ "version": "==0.2.0"
+ },
+ "pymongo": {
+ "hashes": [
+ "sha256:02efd1bb3397e24ef2af45923888b41a378ce00cb3a4259c5f4fc3c70497a22f",
+ "sha256:0d833651f1ba938bb7501f13e326b96cfbb7d98867b2d545ca6d69c7664903e0",
+ "sha256:12c466e02133b7f8f4ff1045c6b5916215c5f7923bc83fd6e28e290cba18f9f6",
+ "sha256:12d1fef77d25640cb78893d07ff7d2fac4c4461d8eec45bd3b9ad491a1115d6e",
+ "sha256:194065c9d445017b3c82fb85f89aa2055464a080bde604010dc8eb932a6b3c95",
+ "sha256:1c78f156edc59b905c80c9003e022e1a764c54fd40ac4fea05b0764f829790e2",
+ "sha256:1e37faf298a37ffb3e0809e77fbbb0a32b6a2d18a83c59cfc2a7b794ea1136b0",
+ "sha256:25eeb2c18ede63891cbd617943dd9e6b9cbccc54f276e0b2e693a0cc40f243c5",
+ "sha256:268d8578c0500012140c5460755ea405cbfe541ef47c81efa9d6744f0f99aeca",
+ "sha256:2cb77d09bd012cb4b30636e7e38d00b5f9be5eb521c364bde66490c45ee6c4b4",
+ "sha256:347c49cf7f0ba49ea87c1a5a1984187ecc5516b7c753f31938bf7b37462824fd",
+ "sha256:35b3f0c7d49724859d4df5f0445818d525824a6cd55074c42573d9b50764df67",
+ "sha256:37e9ea81fa59ee9274457ed7d59b6c27f6f2a5fe8e26f184ecf58ea52a019cb8",
+ "sha256:47a1a4832ef2f4346dcd1a10a36ade7367ad6905929ddb476459abb4fd1b98cb",
+ "sha256:4bdb5ffe1cd3728c9479671a067ef44dacafc3743741d4dc700c377c4231356f",
+ "sha256:4ffd1519edbe311df73c74ec338de7d294af535b2748191c866ea3a7c484cd15",
+ "sha256:5239776633f7578b81207e5646245415a5a95f6ae5ef5dff8e7c2357e6264bfc",
+ "sha256:5239ef7e749f1326ea7564428bf861d5250aa39d7f26d612741b1b1273227062",
+ "sha256:56bf8b706946952acdea0fe478f8e44f1ed101c4b87f046859e6c3abe6c0a9f4",
+ "sha256:65b4c00dedbd333698b83cd2095a639a6f0d7c4e2a617988f6c65fb46711f028",
+ "sha256:6a87eef394039765679f75c6a47455a4030870341cb76eafc349c5944408c882",
+ "sha256:727ad07952c155cd20045f2ce91143c7dc4fb01a5b4e8012905a89a7da554b0c",
+ "sha256:730778b6f0964b164c187289f906bbc84cb0524df285b7a85aa355bbec43eb21",
+ "sha256:743552033c63f0afdb56b9189ab04b5c1dbffd7310cf7156ab98eebcecf24621",
+ "sha256:7e9d9d2c0aae73aa4369bd373ac2ac59f02c46d4e56c4b6d6e250cfe85f76802",
+ "sha256:82102e353be13f1a6769660dd88115b1da382447672ba1c2662a0fbe3df1d861",
+ "sha256:827611beb6c483260d520cfa6a49662d980dfa5368a04296f65fa39e78fccea7",
+ "sha256:84bc00200c3cbb6c98a2bb964c9e8284b641e4a33cf10c802390552575ee21de",
+ "sha256:87032f818bf5052ab742812c715eff896621385c43f8f97cdd37d15b5d394e95",
+ "sha256:87832d6076c2c82f42870157414fd876facbb6554d2faf271ffe7f8f30ce7bed",
+ "sha256:87bb453ac3eb44db95cb6d5a616fbc906c1c00661eec7f55696253a6245beb8a",
+ "sha256:9024e1661c6e40acf468177bf90ce924d1bc681d2b244adda3ed7b2f4c4d17d7",
+ "sha256:9349f0bb17a31371d4cacb64b306e4ca90413a3ad1fffe73ac7cd495570d94b5",
+ "sha256:9385654f01a90f73827af4db90c290a1519f7d9102ba43286e187b373e9a78e9",
+ "sha256:9a8bd37f5dabc86efceb8d8cbff5969256523d42d08088f098753dba15f3b37a",
+ "sha256:9d892fb91e81cccb83f507cdb2ea0aa026ec3ced7f12a1d60f6a5bf0f20f9c1f",
+ "sha256:a754e366c404d19ff3f077ddeed64be31e0bb515e04f502bf11987f1baa55a16",
+ "sha256:b48a5650ee5320d59f6d570bd99a8d5c58ac6f297a4e9090535f6561469ac32e",
+ "sha256:bcf337d1b252405779d9c79978d6ca15eab3cdaa2f44c100a79221bddad97c8a",
+ "sha256:c44efab10d9a3db920530f7bcb26af8f408b7273d2f0214081d3891979726328",
+ "sha256:c72d16fede22efe7cdd1f422e8da15760e9498024040429362886f946c10fe95",
+ "sha256:cb6e00a79dff22c9a72212ad82021b54bdb3b85f38a85f4fc466bde581d7d17a",
+ "sha256:ce1a374ea0e49808e0380ffc64284c0ce0f12bd21042b4bef1af3eb7bdf49054",
+ "sha256:cecd2df037249d1c74f0af86fb5b766104a5012becac6ff63d85d1de53ba8b98",
+ "sha256:cf17ea9cea14d59b0527403dd7106362917ced7c4ec936c4ba22bd36c912c8e0",
+ "sha256:cf28430ec1924af1bffed37b69a812339084697fd3f3e781074a0148e6475803",
+ "sha256:d1bcd58669e56c08f1e72c5758868b5df169fe267501c949ee83c418e9df9155",
+ "sha256:d275596f840018858757561840767b39272ac96436fcb54f5cac6d245393fd97",
+ "sha256:d2dcf608d35644e8d276d61bf40a93339d8d66a0e5f3e3f75b2c155a421a1b71",
+ "sha256:d4d59776f435564159196d971aa89422ead878174aff8fe18e06d9a0bc6d648c",
+ "sha256:d9b6cbc037108ff1a0a867e7670d8513c37f9bcd9ee3d2464411bfabf70ca002",
+ "sha256:db4380d1e69fdad1044a4b8f3bb105200542c49a0dde93452d938ff9db1d6d29",
+ "sha256:e004527ea42a6b99a8b8d5b42b42762c3bdf80f88fbdb5c3a9d47f3808495b86",
+ "sha256:e6eab12c6385526d386543d6823b07187fefba028f0da216506e00f0e1855119",
+ "sha256:eb0642e5f0dd7e86bb358749cc278e70b911e617f519989d346f742dc9520dfb",
+ "sha256:f91073049c43d14e66696970dd708d319b86ee57ef9af359294eee072abaac79",
+ "sha256:fadc6e8db7707c861ebe25b13ad6aca19ea4d2c56bf04a26691f46c23dadf6e4",
+ "sha256:fc5af24fcf5fc6f7f40d65446400d45dd12bea933d0299dc9e90c5b22197f1e9",
+ "sha256:fcaf8c911cb29316a02356f89dbc0e0dfcc6a712ace217b6b543805690d2aefd",
+ "sha256:ffd4d7cb2e6c6e100e2b39606d38a9ffc934e18593dc9bb326196afc7d93ce3d"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.7'",
+ "version": "==4.7.2"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff",
+ "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48",
+ "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086",
+ "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e",
+ "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133",
+ "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5",
+ "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484",
+ "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee",
+ "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5",
+ "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68",
+ "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a",
+ "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf",
+ "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99",
+ "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8",
+ "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85",
+ "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19",
+ "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc",
+ "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a",
+ "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1",
+ "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317",
+ "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c",
+ "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631",
+ "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d",
+ "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652",
+ "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5",
+ "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e",
+ "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b",
+ "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8",
+ "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476",
+ "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706",
+ "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563",
+ "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237",
+ "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b",
+ "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083",
+ "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180",
+ "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425",
+ "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e",
+ "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f",
+ "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725",
+ "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183",
+ "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab",
+ "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774",
+ "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725",
+ "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e",
+ "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5",
+ "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d",
+ "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290",
+ "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44",
+ "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed",
+ "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4",
+ "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba",
+ "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12",
+ "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.8'",
+ "version": "==6.0.2"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760",
+ "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==2.32.3"
+ },
+ "semantic-version": {
+ "hashes": [
+ "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c",
+ "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '2.7'",
+ "version": "==2.10.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274",
+ "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"
+ ],
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
+ "version": "==1.17.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac",
+ "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==2.2.3"
+ },
+ "uwsgi": {
+ "hashes": [
+ "sha256:4cc4727258671ac5fa17ab422155e9aaef8a2008ebb86e4404b66deaae965db2"
+ ],
+ "index": "pypi",
+ "version": "==2.0.22"
+ }
+ },
+ "develop": {
+ "coverage": {
+ "hashes": [
+ "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca",
+ "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d",
+ "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6",
+ "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989",
+ "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c",
+ "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b",
+ "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223",
+ "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f",
+ "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56",
+ "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3",
+ "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8",
+ "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb",
+ "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388",
+ "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0",
+ "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a",
+ "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8",
+ "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f",
+ "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a",
+ "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962",
+ "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8",
+ "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391",
+ "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc",
+ "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2",
+ "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155",
+ "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb",
+ "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0",
+ "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c",
+ "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a",
+ "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004",
+ "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060",
+ "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232",
+ "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93",
+ "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129",
+ "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163",
+ "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de",
+ "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6",
+ "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23",
+ "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569",
+ "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d",
+ "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778",
+ "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d",
+ "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36",
+ "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a",
+ "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6",
+ "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34",
+ "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704",
+ "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106",
+ "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9",
+ "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862",
+ "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b",
+ "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255",
+ "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16",
+ "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3",
+ "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133",
+ "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb",
+ "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657",
+ "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d",
+ "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca",
+ "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36",
+ "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c",
+ "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e",
+ "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff",
+ "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7",
+ "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5",
+ "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02",
+ "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c",
+ "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df",
+ "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3",
+ "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a",
+ "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959",
+ "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234",
+ "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.8'",
+ "version": "==7.6.1"
+ }
+ }
+}
diff --git a/README.md b/README.md
index e3e84c69..972fa2e7 100644
--- a/README.md
+++ b/README.md
@@ -3,6 +3,8 @@
KBase core service to manage app and module information, registration, and release.
Administrators need to be set separately for the job stats page by being added to [deploy.cfg.](https://github.com/kbaseapps/kb_Metrics/blob/master/deploy.cfg)
+Test: Please refer to the instructions at the top of the `test/test.cfg.example` file.
+
Build status:
master: [](https://travis-ci.org/kbase/catalog)
staging: [](https://travis-ci.org/kbase/catalog)
@@ -11,74 +13,3 @@ develop: [
[](https://coveralls.io/github/kbase/catalog?branch=develop)
-#### v2.2.4 - 7/10/2020
- - Use auth role for list_approved_developers()
-
-#### v2.2.0 - 1/23/19
- - Update code to run on Python 3
- - Use Auth Roles for Catalog Admin
-
-#### v2.1.3 - 11/16/18
- - Update docker-py client code to current 3.x API
- - Get Travis-CI tests working again
- - Convert to dockerhub image builds
- - Change start script to keep service running in foreground
-
-#### v2.1.2 - 3/16/18
- - Pull a new base image if possible each time a module is registered
- - Fix the logic that allows additional html files to be passed from a method's
- ui specification directory to the narrative method service during method validation
-
-#### v2.1.1 - 6/26/17
- - Bugfix for change in docker build log
-
-#### v2.1.0 - 4/13/17
- - No change from 2.0.7, but upgraded minor version number because many new features
- now exist since the initial 2.0.x release.
-
-#### v2.0.7 - 3/28/17
- - Added job_id field to raw execution statistics
- - Support for hidden configuration parameters
-
-#### v2.0.6 - 12/7/16
- - Bug is fixed in module registration related to docker client timeout happening
- for long reference-data stage
-
-#### v2.0.5 - 9/12/16
- - Added volume mount configuration
- - Modified client group configurations so that functions are specified, not app_ids
- - Allow admin users to register modules
- - Initial porting to new KBase authentication clients
-
-#### v2.0.3 - 5/31/16
- - Major release to support storage of local functions and dynamic services information,
- including methods to query/filter/fetch local function and dynamic service info
- - Improved methods for fetching module versions by semantic version matching
- - All old module versions are now preserved and can be retrieved by git commit hash
- - Module descriptions are now attached to specific module versions instead of to
- the module itself, so are effectively versioned
- - Tests extended to cover docker steps in registration in Travis, and added to coveralls
-
-#### v1.0.4 - 2/26/16
- - Fix for bug with accessible dev-version after registration failure
-
-#### v1.0.3 - 2/24/16
- - Method to generate usage stats for admins
-
-#### v1.0.2 - 2/18/16
- - Allow specification of client groups
- - Method to check for admin status
-
-#### v1.0.1 - 2/17/16
- - Prevent reregistration of inactive modules
-
-#### v1.0.0 - 2/11/16
- - First release, all features are new
- - Dynamic KBase SDK module registration
- - Management of the module release process (dev->beta->release)
- - Versioning of all release versions
- - Basic query and search of modules
- - Management of approved KBase developers
- - Management of favorite Apps
- - Tracking and query of SDK module run statistics
- - Admin methods for approving modules/developers, updating module state
diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md
new file mode 100644
index 00000000..95eb6038
--- /dev/null
+++ b/RELEASE_NOTES.md
@@ -0,0 +1,86 @@
+# Catalog Service release notes
+
+## v2.3.1 - 9/3/2025
+ - Added a JITPack build for the catalog client
+
+## v2.3.0 - 3/5/2025
+ - Removed all submodules (jars, kbapi_common, nms) as part of repository cleanup.
+ - The MongoDB clients have been updated to the most recent version and the service tested against Mongo 7.
+ - Added the mongo-retrywrites configuration setting in deployment.cfg.templ, defaulting to false.
+ - Added the docker-compose_nms.yml file to start a narrative method store server in test mode.
+ - Added pipenv to handle dependencies.
+ - Removed .travis.yml and added test.yml in .github/workflows for updated CI configuration.
+ - Updated Python to 3.9.19.
+ - Implemented two MongoDB client initializations in db.py: one during the init setup and another for lazy initialization after process forking, preventing the "MongoClient opened before fork" error.
+
+## v2.2.4 - 7/10/2020
+ - Use auth role for list_approved_developers()
+
+## v2.2.0 - 1/23/19
+ - Update code to run on Python 3
+ - Use Auth Roles for Catalog Admin
+
+## v2.1.3 - 11/16/18
+ - Update docker-py client code to current 3.x API
+ - Get Travis-CI tests working again
+ - Convert to dockerhub image builds
+ - Change start script to keep service running in foreground
+
+## v2.1.2 - 3/16/18
+ - Pull a new base image if possible each time a module is registered
+ - Fix the logic that allows additional html files to be passed from a method's
+ ui specification directory to the narrative method service during method validation
+
+## v2.1.1 - 6/26/17
+ - Bugfix for change in docker build log
+
+## v2.1.0 - 4/13/17
+ - No change from 2.0.7, but upgraded minor version number because many new features
+ now exist since the initial 2.0.x release.
+
+## v2.0.7 - 3/28/17
+ - Added job_id field to raw execution statistics
+ - Support for hidden configuration parameters
+
+## v2.0.6 - 12/7/16
+ - Bug is fixed in module registration related to docker client timeout happening
+ for long reference-data stage
+
+## v2.0.5 - 9/12/16
+ - Added volume mount configuration
+ - Modified client group configurations so that functions are specified, not app_ids
+ - Allow admin users to register modules
+ - Initial porting to new KBase authentication clients
+
+## v2.0.3 - 5/31/16
+ - Major release to support storage of local functions and dynamic services information,
+ including methods to query/filter/fetch local function and dynamic service info
+ - Improved methods for fetching module versions by semantic version matching
+ - All old module versions are now preserved and can be retrieved by git commit hash
+ - Module descriptions are now attached to specific module versions instead of to
+ the module itself, so are effectively versioned
+ - Tests extended to cover docker steps in registration in Travis, and added to coveralls
+
+## v1.0.4 - 2/26/16
+ - Fix for bug with accessible dev-version after registration failure
+
+## v1.0.3 - 2/24/16
+ - Method to generate usage stats for admins
+
+## v1.0.2 - 2/18/16
+ - Allow specification of client groups
+ - Method to check for admin status
+
+## v1.0.1 - 2/17/16
+ - Prevent reregistration of inactive modules
+
+## v1.0.0 - 2/11/16
+ - First release, all features are new
+ - Dynamic KBase SDK module registration
+ - Management of the module release process (dev->beta->release)
+ - Versioning of all release versions
+ - Basic query and search of modules
+ - Management of approved KBase developers
+ - Management of favorite Apps
+ - Tracking and query of SDK module run statistics
+ - Admin methods for approving modules/developers, updating module state
\ No newline at end of file
diff --git a/build.gradle b/build.gradle
new file mode 100644
index 00000000..dcd377d6
--- /dev/null
+++ b/build.gradle
@@ -0,0 +1,60 @@
+/*
+ * This file was generated by the Gradle 'init' task.
+ */
+
+plugins {
+ id 'java'
+ id 'maven-publish'
+}
+
+group = 'com.github.kbase'
+
+var VER_AUTH2_CLIENT = "0.5.0"
+var VER_JAVA_COMMON = "0.3.0"
+
+repositories {
+ mavenCentral()
+ maven {
+ name = "JitPack"
+ url = 'https://jitpack.io'
+ }
+}
+
+compileJava {
+ // TODO BUILD remove when we no longer support java 8, use `options.release = 11` if needed
+ java.sourceCompatibility = JavaVersion.VERSION_1_8
+ java.targetCompatibility = JavaVersion.VERSION_1_8
+}
+
+java {
+ withSourcesJar()
+ withJavadocJar()
+}
+
+javadoc {
+ options {
+ // I don't know why this isn't working, but it's not worth spending time on right now
+ links "https://docs.oracle.com/javase/11/docs/api/"
+ links "https://javadoc.jitpack.io/com/github/kbase/auth2_client_java/$VER_AUTH2_CLIENT/javadoc/"
+ links "https://javadoc.jitpack.io/com/github/kbase/java_common/$VER_JAVA_COMMON/javadoc/"
+ }
+}
+
+publishing {
+ publications {
+ maven(MavenPublication) {
+ from components.java
+ }
+ }
+}
+
+dependencies {
+
+ // using older dependencies to not force upgrades on services that might not be able to
+ // handle them. Need to upgrade the services and then upgrade here
+ implementation "com.github.kbase:java_common:$VER_JAVA_COMMON"
+ implementation "com.fasterxml.jackson.core:jackson-databind:2.5.4"
+ implementation "com.github.kbase:auth2_client_java:$VER_AUTH2_CLIENT"
+ implementation 'javax.annotation:javax.annotation-api:1.3.2'
+
+}
diff --git a/deploy.cfg b/deploy.cfg
index d4759445..9e4dd1ea 100644
--- a/deploy.cfg
+++ b/deploy.cfg
@@ -12,6 +12,10 @@ mongodb-database = catalog
# password for the account
#mongodb-pwd = add password here
+# Whether to enable ('true') the MongoDB retryWrites parameter or not (anything other than 'true').
+# See https://www.mongodb.com/docs/manual/core/retryable-writes/
+mongodb-retrywrites=false
+
# The KBase auth server url.
auth-service-url = https://kbase.us/services/authorization/Sessions/Login
admin-roles = KBASE_ADMIN,CATALOG_ADMIN
diff --git a/deployment/conf/.templates/deploy.cfg.templ b/deployment/conf/.templates/deploy.cfg.templ
index 0e35e725..295d6edd 100644
--- a/deployment/conf/.templates/deploy.cfg.templ
+++ b/deployment/conf/.templates/deploy.cfg.templ
@@ -15,6 +15,9 @@ mongodb-pwd = {{ default .Env.mongodb_pwd "" }}
# auth mechanism
mongodb-authmechanism = {{ default .Env.mongodb_authMechanism "DEFAULT" }}
+# Whether to enable the MongoDB retryWrites parameter or not
+mongodb-retrywrites={{ default .Env.mongodb_retrywrites "false" }}
+
# The KBase auth server url.
auth-service-url = {{ default .Env.auth_service_url "https://kbase.us/services/authorization/Sessions/Login" }}
# The KBase auth API.
diff --git a/gradle.properties b/gradle.properties
new file mode 100644
index 00000000..377538c9
--- /dev/null
+++ b/gradle.properties
@@ -0,0 +1,5 @@
+# This file was generated by the Gradle 'init' task.
+# https://docs.gradle.org/current/userguide/build_environment.html#sec:gradle_configuration_properties
+
+org.gradle.configuration-cache=true
+
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
new file mode 100644
index 00000000..58ad57ac
--- /dev/null
+++ b/gradle/libs.versions.toml
@@ -0,0 +1,2 @@
+# This file was generated by the Gradle 'init' task.
+# https://docs.gradle.org/current/userguide/platforms.html#sub::toml-dependencies-format
\ No newline at end of file
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 00000000..1b33c55b
Binary files /dev/null and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 00000000..d4081da4
--- /dev/null
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,7 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip
+networkTimeout=10000
+validateDistributionUrl=true
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/gradlew b/gradlew
new file mode 100755
index 00000000..23d15a93
--- /dev/null
+++ b/gradlew
@@ -0,0 +1,251 @@
+#!/bin/sh
+
+#
+# Copyright © 2015-2021 the original authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+##############################################################################
+#
+# Gradle start up script for POSIX generated by Gradle.
+#
+# Important for running:
+#
+# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
+# noncompliant, but you have some other compliant shell such as ksh or
+# bash, then to run this script, type that shell name before the whole
+# command line, like:
+#
+# ksh Gradle
+#
+# Busybox and similar reduced shells will NOT work, because this script
+# requires all of these POSIX shell features:
+# * functions;
+# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
+# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
+# * compound commands having a testable exit status, especially «case»;
+# * various built-in commands including «command», «set», and «ulimit».
+#
+# Important for patching:
+#
+# (2) This script targets any POSIX shell, so it avoids extensions provided
+# by Bash, Ksh, etc; in particular arrays are avoided.
+#
+# The "traditional" practice of packing multiple parameters into a
+# space-separated string is a well documented source of bugs and security
+# problems, so this is (mostly) avoided, by progressively accumulating
+# options in "$@", and eventually passing that to Java.
+#
+# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
+# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
+# see the in-line comments for details.
+#
+# There are tweaks for specific operating systems such as AIX, CygWin,
+# Darwin, MinGW, and NonStop.
+#
+# (3) This script is generated from the Groovy template
+# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+# within the Gradle project.
+#
+# You can find Gradle at https://github.com/gradle/gradle/.
+#
+##############################################################################
+
+# Attempt to set APP_HOME
+
+# Resolve links: $0 may be a link
+app_path=$0
+
+# Need this for daisy-chained symlinks.
+while
+ APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
+ [ -h "$app_path" ]
+do
+ ls=$( ls -ld "$app_path" )
+ link=${ls#*' -> '}
+ case $link in #(
+ /*) app_path=$link ;; #(
+ *) app_path=$APP_HOME$link ;;
+ esac
+done
+
+# This is normally unused
+# shellcheck disable=SC2034
+APP_BASE_NAME=${0##*/}
+# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
+APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD=maximum
+
+warn () {
+ echo "$*"
+} >&2
+
+die () {
+ echo
+ echo "$*"
+ echo
+ exit 1
+} >&2
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "$( uname )" in #(
+ CYGWIN* ) cygwin=true ;; #(
+ Darwin* ) darwin=true ;; #(
+ MSYS* | MINGW* ) msys=true ;; #(
+ NONSTOP* ) nonstop=true ;;
+esac
+
+CLASSPATH="\\\"\\\""
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD=$JAVA_HOME/jre/sh/java
+ else
+ JAVACMD=$JAVA_HOME/bin/java
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD=java
+ if ! command -v java >/dev/null 2>&1
+ then
+ die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+fi
+
+# Increase the maximum file descriptors if we can.
+if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
+ case $MAX_FD in #(
+ max*)
+ # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
+ # shellcheck disable=SC2039,SC3045
+ MAX_FD=$( ulimit -H -n ) ||
+ warn "Could not query maximum file descriptor limit"
+ esac
+ case $MAX_FD in #(
+ '' | soft) :;; #(
+ *)
+ # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
+ # shellcheck disable=SC2039,SC3045
+ ulimit -n "$MAX_FD" ||
+ warn "Could not set maximum file descriptor limit to $MAX_FD"
+ esac
+fi
+
+# Collect all arguments for the java command, stacking in reverse order:
+# * args from the command line
+# * the main class name
+# * -classpath
+# * -D...appname settings
+# * --module-path (only if needed)
+# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if "$cygwin" || "$msys" ; then
+ APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
+ CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
+
+ JAVACMD=$( cygpath --unix "$JAVACMD" )
+
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ for arg do
+ if
+ case $arg in #(
+ -*) false ;; # don't mess with options #(
+ /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
+ [ -e "$t" ] ;; #(
+ *) false ;;
+ esac
+ then
+ arg=$( cygpath --path --ignore --mixed "$arg" )
+ fi
+ # Roll the args list around exactly as many times as the number of
+ # args, so each arg winds up back in the position where it started, but
+ # possibly modified.
+ #
+ # NB: a `for` loop captures its iteration list before it begins, so
+ # changing the positional parameters here affects neither the number of
+ # iterations, nor the values presented in `arg`.
+ shift # remove old arg
+ set -- "$@" "$arg" # push replacement arg
+ done
+fi
+
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Collect all arguments for the java command:
+# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
+# and any embedded shellness will be escaped.
+# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
+# treated as '${Hostname}' itself on the command line.
+
+set -- \
+ "-Dorg.gradle.appname=$APP_BASE_NAME" \
+ -classpath "$CLASSPATH" \
+ -jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \
+ "$@"
+
+# Stop when "xargs" is not available.
+if ! command -v xargs >/dev/null 2>&1
+then
+ die "xargs is not available"
+fi
+
+# Use "xargs" to parse quoted args.
+#
+# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
+#
+# In Bash we could simply go:
+#
+# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
+# set -- "${ARGS[@]}" "$@"
+#
+# but POSIX shell has neither arrays nor command substitution, so instead we
+# post-process each arg (as a line of input to sed) to backslash-escape any
+# character that might be a shell metacharacter, then use eval to reverse
+# that process (while maintaining the separation between arguments), and wrap
+# the whole thing up as a single "set" statement.
+#
+# This will of course break if any of these variables contains a newline or
+# an unmatched quote.
+#
+
+eval "set -- $(
+ printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
+ xargs -n1 |
+ sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
+ tr '\n' ' '
+ )" '"$@"'
+
+exec "$JAVACMD" "$@"
diff --git a/jars b/jars
deleted file mode 160000
index 7a0edafa..00000000
--- a/jars
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 7a0edafa4dd508eb5f38c1d7810c91e73a0f76d7
diff --git a/kbapi_common b/kbapi_common
deleted file mode 160000
index 65a6d746..00000000
--- a/kbapi_common
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 65a6d746b8c66a89f6c9bf52adf33d5725ee2011
diff --git a/lib/biokbase/catalog/Impl.py b/lib/biokbase/catalog/Impl.py
index 2e55ed75..255e5705 100644
--- a/lib/biokbase/catalog/Impl.py
+++ b/lib/biokbase/catalog/Impl.py
@@ -21,9 +21,9 @@ class Catalog:
# state. A method could easily clobber the state set by another while
# the latter method is running.
######################################### noqa
- VERSION = "0.0.1"
- GIT_URL = "https://github.com/kbase/catalog"
- GIT_COMMIT_HASH = "fda05a2962373163e4983dc5187b1c51cd1455b1"
+ VERSION = "0.1.0"
+ GIT_URL = "https://github.com/kbase/catalog.git"
+ GIT_COMMIT_HASH = "bfd28df246cd39404293ea4cf4cd4200950fc900"
#BEGIN_CLASS_HEADER
#END_CLASS_HEADER
diff --git a/lib/biokbase/catalog/controller.py b/lib/biokbase/catalog/controller.py
index f219f102..11032f40 100644
--- a/lib/biokbase/catalog/controller.py
+++ b/lib/biokbase/catalog/controller.py
@@ -56,13 +56,21 @@ def __init__(self, config):
warnings.warn('"mongodb-authmechanism" is not set in config of CatalogController, using DEFAULT.')
config['mongodb-authmechanism'] = 'DEFAULT'
+ if 'mongodb-retrywrites' not in config: # pragma: no cover
+ warnings.warn('"mongodb-retrywrites" is not set in config of CatalogController, using False.')
+ config['mongodb-retrywrites'] = False
+ else:
+ config['mongodb-retrywrites'] = config['mongodb-retrywrites'] == "true"
+
# instantiate the mongo client
self.db = MongoCatalogDBI(
config['mongodb-host'],
config['mongodb-database'],
config['mongodb-user'],
config['mongodb-pwd'],
- config['mongodb-authmechanism'])
+ config['mongodb-authmechanism'],
+ config['mongodb-retrywrites']
+ )
# check for the temp directory and make sure it exists
if 'temp-dir' not in config: # pragma: no cover
diff --git a/lib/biokbase/catalog/db.py b/lib/biokbase/catalog/db.py
index d3ba841e..6ca5ff28 100644
--- a/lib/biokbase/catalog/db.py
+++ b/lib/biokbase/catalog/db.py
@@ -1,9 +1,11 @@
import copy
import pprint
+import threading
from pymongo import ASCENDING
from pymongo import DESCENDING
from pymongo import MongoClient
+from pymongo.errors import ConnectionFailure
'''
@@ -97,6 +99,8 @@
}
'''
+# Module level lock
+lock = threading.Lock()
class MongoCatalogDBI:
# Collection Names
@@ -118,20 +122,86 @@ class MongoCatalogDBI:
_EXEC_STATS_USERS = 'exec_stats_users'
_SECURE_CONFIG_PARAMS = 'secure_config_params'
- def __init__(self, mongo_host, mongo_db, mongo_user, mongo_psswd, mongo_authMechanism):
-
- # create the client
- self.mongo = MongoClient('mongodb://' + mongo_host)
-
- # Try to authenticate, will throw an exception if the user/psswd is not valid for the db
- # the pymongo docs say authenticate() is deprecated, but testing putting auth in
- # the MongoClient call failed
- # to do: add authMechanism as an argument
- if (mongo_user and mongo_psswd):
- self.mongo[mongo_db].authenticate(mongo_user, mongo_psswd, mechanism=mongo_authMechanism)
+ def __init__(self, mongo_host, mongo_db, mongo_user, mongo_psswd, mongo_auth_mechanism, mongo_retry_writes):
+
+ # We're performing two MongoDB client initializations — one during the initial setup (on init), and
+ # another lazy initialization after the process is forked. This approach is necessary because MongoDB client
+ # connections are not fork-safe and some servers, including uwsgi, fork workers from parent processes.
+
+ # When a process is forked, the child process inherits a copy of the parentâs memory,
+ # but the client connection doesn't transfer properly. This can cause issues if both the parent and child
+ # processes share the same connection. To prevent this, we close the MongoDB client connection before forking
+ # and reinitialize it in each process, ensuring that both the parent and child processes maintain
+ # their own independent, functional connections.
+
+ self.mongo_host = mongo_host
+ self.mongo_db = mongo_db
+ self.mongo_user = mongo_user
+ self.mongo_psswd = mongo_psswd
+ self.mongo_auth_mechanism = mongo_auth_mechanism
+ self.mongo_retry_writes = mongo_retry_writes
+
+ self.mongo_client = None
+
+ # Initialize mongo client
+ mongo_client = self._initialize_mongo_client()
+
+ # Check the db schema
+ self.check_db_schema(mongo_client)
+
+ # Create indexes
+ self._create_indexes(mongo_client)
+
+ # Close the MongoDB client manually before forking
+ mongo_client.close()
+ print("MongoDB client closed.")
+
+
+ def _initialize_mongo_client(self):
+ """Initialize MongoDB client."""
+ try:
+ # This is only tested manually
+ if self.mongo_user and self.mongo_psswd:
+ # Connection string with authentication
+ mongo_client = MongoClient(
+ self.mongo_host,
+ username=self.mongo_user,
+ password=self.mongo_psswd,
+ authSource=self.mongo_db,
+ authMechanism=self.mongo_auth_mechanism,
+ retryWrites=self.mongo_retry_writes
+ )
+ else:
+ # Connection string without authentication
+ mongo_client = MongoClient(
+ self.mongo_host,
+ retryWrites=self.mongo_retry_writes
+ )
- # Grab a handle to the database and collections
- self.db = self.mongo[mongo_db]
+ # Force a call to server to verify the connection
+ mongo_client.server_info()
+ print("Connection successful!")
+
+ return mongo_client
+
+ except ConnectionFailure as e:
+ raise ValueError(f"Cannot connect to Mongo server: {e}") from e
+
+ def _ensure_mongo_connection(self):
+ """Ensure the MongoDB connection is active."""
+ # Don't enter the lock if we already have the client
+ if not self.mongo_client:
+ # Use the lock to ensure only one thread initializes the mongo client at a time
+ with lock:
+ # Double-check if another thread has already initialized the client while we were waiting for the lock
+ if self.mongo_client:
+ return
+ self.mongo_client = self._initialize_mongo_client()
+ self._create_collections()
+
+ def _create_collections(self):
+ """Grab a handle to the database and collections."""
+ self.db = self.mongo_client[self.mongo_db]
self.modules = self.db[MongoCatalogDBI._MODULES]
self.module_versions = self.db[MongoCatalogDBI._MODULE_VERSIONS]
@@ -148,104 +218,116 @@ def __init__(self, mongo_host, mongo_db, mongo_user, mongo_psswd, mongo_authMech
self.secure_config_params = self.db[MongoCatalogDBI._SECURE_CONFIG_PARAMS]
- # check the db schema
- self.check_db_schema()
+ def _create_indexes(self, mongo_client):
+ db = mongo_client[self.mongo_db]
# Make sure we have an index on module and git_repo_url
- self.module_versions.create_index('module_name_lc', sparse=False)
- self.module_versions.create_index('git_commit_hash', sparse=False)
- self.module_versions.create_index([
+ module_versions = db[MongoCatalogDBI._MODULE_VERSIONS]
+ module_versions.create_index('module_name_lc', sparse=False)
+ module_versions.create_index('git_commit_hash', sparse=False)
+ module_versions.create_index([
('module_name_lc', ASCENDING),
('git_commit_hash', ASCENDING)],
unique=True, sparse=False)
# Make sure we have a unique index on module_name_lc and git_commit_hash
- self.local_functions.create_index('function_id')
- self.local_functions.create_index([
+ local_functions = db[MongoCatalogDBI._LOCAL_FUNCTIONS]
+ local_functions.create_index('function_id')
+ local_functions.create_index([
('module_name_lc', ASCENDING),
('function_id', ASCENDING),
('git_commit_hash', ASCENDING)],
unique=True, sparse=False)
# local function indecies
- self.local_functions.create_index('module_name_lc')
- self.local_functions.create_index('git_commit_hash')
- self.local_functions.create_index('function_id')
- self.local_functions.create_index([
+ local_functions.create_index('module_name_lc')
+ local_functions.create_index('git_commit_hash')
+ local_functions.create_index('function_id')
+ local_functions.create_index([
('module_name_lc', ASCENDING),
('function_id', ASCENDING),
('git_commit_hash', ASCENDING)],
unique=True, sparse=False)
# developers indecies
- self.developers.create_index('kb_username', unique=True)
+ developers = db[MongoCatalogDBI._DEVELOPERS]
+ developers.create_index('kb_username', unique=True)
- self.build_logs.create_index('registration_id', unique=True)
- self.build_logs.create_index('module_name_lc')
- self.build_logs.create_index('timestamp')
- self.build_logs.create_index('registration')
- self.build_logs.create_index('git_url')
- self.build_logs.create_index('current_versions.release.release_timestamp')
+ build_logs = db[MongoCatalogDBI._BUILD_LOGS]
+ build_logs.create_index('registration_id', unique=True)
+ build_logs.create_index('module_name_lc')
+ build_logs.create_index('timestamp')
+ build_logs.create_index('registration')
+ build_logs.create_index('git_url')
+ build_logs.create_index('current_versions.release.release_timestamp')
# for favorites
- self.favorites.create_index('user')
- self.favorites.create_index('module_name_lc')
- self.favorites.create_index('id')
+ favorites = db[MongoCatalogDBI._FAVORITES]
+ favorites.create_index('user')
+ favorites.create_index('module_name_lc')
+ favorites.create_index('id')
# you can only favorite a method once, so put a unique index on the triple
- self.favorites.create_index([
+ favorites.create_index([
('user', ASCENDING),
('id', ASCENDING),
('module_name_lc', ASCENDING)],
unique=True, sparse=False)
# execution stats
- self.exec_stats_raw.create_index('user_id',
+ exec_stats_raw = db[MongoCatalogDBI._EXEC_STATS_RAW]
+ exec_stats_raw.create_index('user_id',
unique=False, sparse=False)
- self.exec_stats_raw.create_index([('app_module_name', ASCENDING),
+ exec_stats_raw.create_index([('app_module_name', ASCENDING),
('app_id', ASCENDING)],
unique=False, sparse=True)
- self.exec_stats_raw.create_index([('func_module_name', ASCENDING),
+ exec_stats_raw.create_index([('func_module_name', ASCENDING),
('func_name', ASCENDING)],
unique=False, sparse=True)
- self.exec_stats_raw.create_index('creation_time',
+ exec_stats_raw.create_index('creation_time',
unique=False, sparse=False)
- self.exec_stats_raw.create_index('finish_time',
+ exec_stats_raw.create_index('finish_time',
unique=False, sparse=False)
- self.exec_stats_apps.create_index('module_name',
+ exec_stats_apps = db[MongoCatalogDBI._EXEC_STATS_APPS]
+ exec_stats_apps.create_index('module_name',
unique=False, sparse=True)
- self.exec_stats_apps.create_index([('full_app_id', ASCENDING),
+ exec_stats_apps.create_index([('full_app_id', ASCENDING),
('type', ASCENDING),
('time_range', ASCENDING)],
unique=True, sparse=False)
- self.exec_stats_apps.create_index([('type', ASCENDING),
+ exec_stats_apps.create_index([('type', ASCENDING),
('time_range', ASCENDING)],
unique=False, sparse=False)
- self.exec_stats_users.create_index([('user_id', ASCENDING),
+ exec_stats_users = db[MongoCatalogDBI._EXEC_STATS_USERS]
+ exec_stats_users.create_index([('user_id', ASCENDING),
('type', ASCENDING),
('time_range', ASCENDING)],
unique=True, sparse=False)
# client groups and volume mounts
- self.client_groups.create_index([('module_name_lc', ASCENDING),
+ client_groups = db[MongoCatalogDBI._CLIENT_GROUPS]
+ client_groups.create_index([('module_name_lc', ASCENDING),
('function_name', ASCENDING)],
unique=True, sparse=False)
- self.volume_mounts.create_index([('client_group', ASCENDING),
+ volume_mounts = db[MongoCatalogDBI._VOLUME_MOUNTS]
+ volume_mounts.create_index([('client_group', ASCENDING),
('module_name_lc', ASCENDING),
('function_name', ASCENDING)],
unique=True, sparse=False)
# hidden configuration parameters
- self.secure_config_params.create_index('module_name_lc')
- self.secure_config_params.create_index([
+ secure_config_params = db[MongoCatalogDBI._SECURE_CONFIG_PARAMS]
+ secure_config_params.create_index('module_name_lc')
+ secure_config_params.create_index([
('module_name_lc', ASCENDING),
('version', ASCENDING),
('param_name', ASCENDING)],
unique=True, sparse=False)
def is_registered(self, module_name='', git_url=''):
+ self._ensure_mongo_connection()
if not module_name and not git_url:
return False
query = self._get_mongo_query(module_name=module_name, git_url=git_url)
@@ -255,6 +337,7 @@ def is_registered(self, module_name='', git_url=''):
return False
def module_name_lc_exists(self, module_name_lc=''):
+ self._ensure_mongo_connection()
if not module_name_lc:
return False
module = self.modules.find_one({'module_name_lc': module_name_lc.lower()}, ['_id'])
@@ -264,6 +347,7 @@ def module_name_lc_exists(self, module_name_lc=''):
#### SET methods
def create_new_build_log(self, registration_id, timestamp, registration_state, git_url):
+ self._ensure_mongo_connection()
build_log = {
'registration_id': registration_id,
'timestamp': timestamp,
@@ -275,22 +359,26 @@ def create_new_build_log(self, registration_id, timestamp, registration_state, g
self.build_logs.insert_one(build_log)
def delete_build_log(self, registration_id):
+ self._ensure_mongo_connection()
self.build_logs.delete_one({'registration_id': registration_id})
# new_lines is a list to objects, each representing a line
# the object structure is : {'content':... 'error':True/False}
def append_to_build_log(self, registration_id, new_lines):
+ self._ensure_mongo_connection()
result = self.build_logs.update_one({'registration_id': registration_id},
{'$push': {'log': {'$each': new_lines}}})
return self._check_update_result(result)
def set_build_log_state(self, registration_id, registration_state, error_message=''):
+ self._ensure_mongo_connection()
result = self.build_logs.update_one({'registration_id': registration_id},
{'$set': {'registration': registration_state,
'error_message': error_message}})
return self._check_update_result(result)
def set_build_log_module_name(self, registration_id, module_name):
+ self._ensure_mongo_connection()
result = self.build_logs.update_one({'registration_id': registration_id},
{'$set': {'module_name_lc': module_name.lower()}})
return self._check_update_result(result)
@@ -303,7 +391,7 @@ def list_builds(self,
only_running=False,
only_error=False,
only_complete=False):
-
+ self._ensure_mongo_connection()
query = {}
registration_match = None
@@ -337,11 +425,12 @@ def list_builds(self,
query, selection,
skip=skip,
limit=limit,
- sort=[['timestamp', DESCENDING]]))
+ sort=[('timestamp', DESCENDING)]))
# slice arg is used in the mongo query for getting lines. It is either a
# pos int (get first n lines), neg int (last n lines), or array [skip, limit]
def get_parsed_build_log(self, registration_id, slice_arg=None):
+ self._ensure_mongo_connection()
selection = {
'registration_id': 1,
'timestamp': 1,
@@ -359,6 +448,7 @@ def get_parsed_build_log(self, registration_id, slice_arg=None):
def register_new_module(self, git_url, username, timestamp, registration_state,
registration_id):
+ self._ensure_mongo_connection()
# get current time since epoch in ms in utc
module = {
'info': {},
@@ -384,6 +474,7 @@ def register_new_module(self, git_url, username, timestamp, registration_state,
# if the last_state does not match indicating another process changed the state
def set_module_registration_state(self, module_name='', git_url='', new_state=None,
last_state=None, error_message=''):
+ self._ensure_mongo_connection()
if new_state:
query = self._get_mongo_query(module_name=module_name, git_url=git_url)
if last_state:
@@ -395,6 +486,7 @@ def set_module_registration_state(self, module_name='', git_url='', new_state=No
def set_module_release_state(self, module_name='', git_url='', new_state=None, last_state=None,
review_message=''):
+ self._ensure_mongo_connection()
if new_state:
query = self._get_mongo_query(module_name=module_name, git_url=git_url)
if last_state:
@@ -405,7 +497,7 @@ def set_module_release_state(self, module_name='', git_url='', new_state=None, l
return False
def push_beta_to_release(self, module_name='', git_url='', release_timestamp=None):
-
+ self._ensure_mongo_connection()
current_versions = self.get_module_current_versions(module_name=module_name,
git_url=git_url,
substitute_versions=False)
@@ -433,6 +525,7 @@ def push_beta_to_release(self, module_name='', git_url='', release_timestamp=Non
return self._check_update_result(result)
def push_dev_to_beta(self, module_name='', git_url=''):
+ self._ensure_mongo_connection()
current_versions = self.get_module_current_versions(module_name=module_name,
git_url=git_url,
substitute_versions=False)
@@ -443,6 +536,7 @@ def push_dev_to_beta(self, module_name='', git_url=''):
return self._check_update_result(result)
def update_dev_version(self, version_info):
+ self._ensure_mongo_connection()
if version_info:
if 'git_commit_hash' in version_info and 'module_name_lc' in version_info:
@@ -471,6 +565,7 @@ def update_dev_version(self, version_info):
return False
def save_local_function_specs(self, local_functions):
+ self._ensure_mongo_connection()
# just using insert doesn't accept a list of docs in mongo 2.6, so loop for now
for l in local_functions:
matcher = {'module_name_lc': l['module_name_lc'], 'function_id': l['function_id'],
@@ -486,7 +581,7 @@ def save_local_function_specs(self, local_functions):
def lookup_module_versions(self, module_name, git_commit_hash=None, released=None,
included_fields=[], excluded_fields=[]):
-
+ self._ensure_mongo_connection()
query = {'module_name_lc': module_name.strip().lower()}
if git_commit_hash is not None:
@@ -505,7 +600,7 @@ def lookup_module_versions(self, module_name, git_commit_hash=None, released=Non
return list(self.module_versions.find(query, selection))
def list_local_function_info(self, release_tag=None, module_names=[]):
-
+ self._ensure_mongo_connection()
git_commit_hash_list = []
git_commit_hash_release_tag_map = {}
@@ -577,7 +672,7 @@ def list_local_function_info(self, release_tag=None, module_names=[]):
return returned_funcs
def get_local_function_spec(self, functions):
-
+ self._ensure_mongo_connection()
result_list = []
# first lookup all the module info so we can figure out any tags, and make a quick dict
@@ -675,6 +770,7 @@ def get_local_function_spec(self, functions):
return result_list
def set_module_name(self, git_url, module_name):
+ self._ensure_mongo_connection()
if not module_name:
raise ValueError('module_name must be defined to set a module name')
query = self._get_mongo_query(git_url=git_url)
@@ -683,6 +779,7 @@ def set_module_name(self, git_url, module_name):
return self._check_update_result(result)
def set_module_info(self, info, module_name='', git_url=''):
+ self._ensure_mongo_connection()
if not info:
raise ValueError('info must be defined to set the info for a module')
if type(info) is not dict:
@@ -692,6 +789,7 @@ def set_module_info(self, info, module_name='', git_url=''):
return self._check_update_result(result)
def set_module_owners(self, owners, module_name='', git_url=''):
+ self._ensure_mongo_connection()
if not owners:
raise ValueError('owners must be defined to set the owners for a module')
if type(owners) is not list:
@@ -702,16 +800,19 @@ def set_module_owners(self, owners, module_name='', git_url=''):
# active = True | False
def set_module_active_state(self, active, module_name='', git_url=''):
+ self._ensure_mongo_connection()
query = self._get_mongo_query(git_url=git_url, module_name=module_name)
result = self.modules.update_one(query, {'$set': {'state.active': active}})
return self._check_update_result(result)
#### GET methods
def get_module_state(self, module_name='', git_url=''):
+ self._ensure_mongo_connection()
query = self._get_mongo_query(module_name=module_name, git_url=git_url)
return self.modules.find_one(query, ['state'])['state']
def get_module_current_versions(self, module_name='', git_url='', substitute_versions=True):
+ self._ensure_mongo_connection()
query = self._get_mongo_query(module_name=module_name, git_url=git_url)
module_document = self.modules.find_one(query, ['module_name_lc', 'current_versions'])
if substitute_versions and 'module_name_lc' in module_document:
@@ -719,10 +820,12 @@ def get_module_current_versions(self, module_name='', git_url='', substitute_ver
return module_document['current_versions']
def get_module_owners(self, module_name='', git_url=''):
+ self._ensure_mongo_connection()
query = self._get_mongo_query(module_name=module_name, git_url=git_url)
return self.modules.find_one(query, ['owners'])['owners']
def get_module_details(self, module_name='', git_url='', substitute_versions=True):
+ self._ensure_mongo_connection()
query = self._get_mongo_query(module_name=module_name, git_url=git_url)
module_details = self.modules.find_one(query, ['module_name', 'module_name_lc', 'git_url',
'info', 'owners', 'state',
@@ -732,6 +835,7 @@ def get_module_details(self, module_name='', git_url='', substitute_versions=Tru
return module_details
def get_module_full_details(self, module_name='', git_url='', substitute_versions=True):
+ self._ensure_mongo_connection()
query = self._get_mongo_query(module_name=module_name, git_url=git_url)
module_document = self.modules.find_one(query)
if substitute_versions and 'module_name_lc' in module_document:
@@ -741,6 +845,7 @@ def get_module_full_details(self, module_name='', git_url='', substitute_version
#### LIST / SEARCH methods
def find_basic_module_info(self, query):
+ self._ensure_mongo_connection()
selection = {
'_id': 0,
'module_name': 1,
@@ -753,6 +858,7 @@ def find_basic_module_info(self, query):
return list(self.modules.find(query, selection))
def find_current_versions_and_owners(self, query):
+ self._ensure_mongo_connection()
result = list(self.modules.find(query,
{'module_name': 1, 'module_name_lc': 1, 'git_url': 1,
'current_versions': 1, 'owners': 1, '_id': 0}))
@@ -760,7 +866,7 @@ def find_current_versions_and_owners(self, query):
return result
def substitute_hashes_for_version_info(self, module_list):
-
+ self._ensure_mongo_connection()
# get all the version commit hashes
hash_list = []
for mod in module_list:
@@ -807,7 +913,7 @@ def substitute_hashes_for_version_info(self, module_list):
# tag should be one of dev, beta, release - do checking outside of this method
def list_service_module_versions_with_tag(self, tag):
-
+ self._ensure_mongo_connection()
mods = list(self.modules.find({'info.dynamic_service': 1},
{'module_name_lc': 1, 'module_name': 1,
'current_versions.' + tag: 1}))
@@ -828,6 +934,7 @@ def list_service_module_versions_with_tag(self, tag):
# all released service module versions
def list_all_released_service_module_versions(self):
+ self._ensure_mongo_connection()
return list(self.module_versions.find(
{
'dynamic_service': 1,
@@ -844,18 +951,21 @@ def list_all_released_service_module_versions(self):
#### developer check methods
def approve_developer(self, developer):
+ self._ensure_mongo_connection()
# if the developer is already on the list, just return
if self.is_approved_developer([developer])[0]:
return
self.developers.insert_one({'kb_username': developer})
def revoke_developer(self, developer):
+ self._ensure_mongo_connection()
# if the developer is not on the list, throw an error (maybe a typo, so let's catch it)
if not self.is_approved_developer([developer])[0]:
raise ValueError('Cannot revoke "' + developer + '", that developer was not found.')
self.developers.delete_one({'kb_username': developer})
def is_approved_developer(self, usernames):
+ self._ensure_mongo_connection()
# TODO: optimize, but I expect the list of usernames will be fairly small, so we can loop. Regardless, in
# old mongo (2.x) I think this is even faster in most cases than using $in within a very large list
is_approved = []
@@ -868,9 +978,11 @@ def is_approved_developer(self, usernames):
return is_approved
def list_approved_developers(self):
+ self._ensure_mongo_connection()
return list(self.developers.find({}, {'kb_username': 1, '_id': 0}))
def migrate_module_to_new_git_url(self, module_name, current_git_url, new_git_url):
+ self._ensure_mongo_connection()
if not new_git_url.strip():
raise ValueError('New git url is required to migrate_module_to_new_git_url.')
query = self._get_mongo_query(module_name=module_name, git_url=current_git_url)
@@ -882,6 +994,7 @@ def migrate_module_to_new_git_url(self, module_name, current_git_url, new_git_ur
return self._check_update_result(result)
def delete_module(self, module_name, git_url):
+ self._ensure_mongo_connection()
if not module_name and not git_url:
raise ValueError('Module name or git url is required to delete a module.')
query = self._get_mongo_query(module_name=module_name, git_url=git_url)
@@ -901,6 +1014,7 @@ def delete_module(self, module_name, git_url):
return self._check_update_result(result)
def add_favorite(self, module_name, app_id, username, timestamp):
+ self._ensure_mongo_connection()
favoriteAddition = {
'user': username,
'module_name_lc': module_name.strip().lower(),
@@ -915,6 +1029,7 @@ def add_favorite(self, module_name, app_id, username, timestamp):
self.favorites.insert_one(favoriteAddition)
def remove_favorite(self, module_name, app_id, username):
+ self._ensure_mongo_connection()
favoriteAddition = {
'user': username,
'module_name_lc': module_name.strip().lower(),
@@ -929,20 +1044,19 @@ def remove_favorite(self, module_name, app_id, username):
return self._check_update_result(result)
def list_user_favorites(self, username):
+ self._ensure_mongo_connection()
query = {'user': username}
selection = {'_id': 0, 'module_name_lc': 1, 'id': 1, 'timestamp': 1}
- return list(self.favorites.find(
- query, selection,
- sort=[['timestamp', DESCENDING]]))
+ return list(self.favorites.find(query, selection).sort('timestamp', DESCENDING))
def list_app_favorites(self, module_name, app_id):
+ self._ensure_mongo_connection()
query = {'module_name_lc': module_name.strip().lower(), 'id': app_id.strip()}
selection = {'_id': 0, 'user': 1, 'timestamp': 1}
- return list(self.favorites.find(
- query, selection,
- sort=[['timestamp', DESCENDING]]))
+ return list(self.favorites.find(query, selection).sort('timestamp', DESCENDING))
def aggregate_favorites_over_apps(self, module_names_lc):
+ self._ensure_mongo_connection()
### WARNING! If we switch to Mongo 3.x, the result object will change and this will break
# setup the query
@@ -986,6 +1100,7 @@ def aggregate_favorites_over_apps(self, module_names_lc):
# DEPRECATED! temporary function until everything is migrated to new client group structure
def list_client_groups(self, app_ids):
+ self._ensure_mongo_connection()
if app_ids is not None:
selection = {
'_id': 0,
@@ -1013,6 +1128,7 @@ def list_client_groups(self, app_ids):
return list(self.client_groups.find({}, selection))
def set_client_group_config(self, config):
+ self._ensure_mongo_connection()
config['module_name_lc'] = config['module_name'].lower()
return self._check_update_result(self.client_groups.replace_one(
{
@@ -1024,6 +1140,7 @@ def set_client_group_config(self, config):
))
def remove_client_group_config(self, config):
+ self._ensure_mongo_connection()
config['module_name_lc'] = config['module_name'].lower()
return self._check_update_result(self.client_groups.delete_one(
{
@@ -1033,6 +1150,7 @@ def remove_client_group_config(self, config):
))
def list_client_group_configs(self, filter):
+ self._ensure_mongo_connection()
selection = {"_id": 0, "module_name_lc": 0}
if 'module_name' in filter:
filter['module_name_lc'] = filter['module_name'].lower()
@@ -1040,6 +1158,7 @@ def list_client_group_configs(self, filter):
return list(self.client_groups.find(filter, selection))
def set_volume_mount(self, volume_mount):
+ self._ensure_mongo_connection()
volume_mount['module_name_lc'] = volume_mount['module_name'].lower()
return self._check_update_result(self.volume_mounts.replace_one(
{
@@ -1052,6 +1171,7 @@ def set_volume_mount(self, volume_mount):
))
def remove_volume_mount(self, volume_mount):
+ self._ensure_mongo_connection()
volume_mount['module_name_lc'] = volume_mount['module_name'].lower()
return self._check_update_result(self.volume_mounts.delete_one(
{
@@ -1061,6 +1181,7 @@ def remove_volume_mount(self, volume_mount):
}))
def list_volume_mounts(self, filter):
+ self._ensure_mongo_connection()
selection = {"_id": 0, "module_name_lc": 0}
if 'module_name' in filter:
filter['module_name_lc'] = filter['module_name'].lower()
@@ -1099,6 +1220,7 @@ def _check_update_result(self, result):
def add_exec_stats_raw(self, user_id, app_module_name, app_id, func_module_name, func_name,
git_commit_hash, creation_time, exec_start_time, finish_time, is_error,
job_id):
+ self._ensure_mongo_connection()
stats = {
'user_id': user_id,
'app_module_name': app_module_name,
@@ -1116,6 +1238,7 @@ def add_exec_stats_raw(self, user_id, app_module_name, app_id, func_module_name,
def add_exec_stats_apps(self, app_module_name, app_id, creation_time, exec_start_time,
finish_time, is_error, type, time_range):
+ self._ensure_mongo_connection()
if not app_id:
return
full_app_id = app_id
@@ -1138,6 +1261,7 @@ def add_exec_stats_apps(self, app_module_name, app_id, creation_time, exec_start
def add_exec_stats_users(self, user_id, creation_time, exec_start_time,
finish_time, is_error, type, time_range):
+ self._ensure_mongo_connection()
queue_time = exec_start_time - creation_time
exec_time = finish_time - exec_start_time
inc_data = {
@@ -1151,6 +1275,7 @@ def add_exec_stats_users(self, user_id, creation_time, exec_start_time,
{'$inc': inc_data}, upsert=True)
def get_exec_stats_apps(self, full_app_ids, type, time_range):
+ self._ensure_mongo_connection()
filter = {}
if full_app_ids:
filter['full_app_id'] = {'$in': full_app_ids}
@@ -1171,7 +1296,7 @@ def get_exec_stats_apps(self, full_app_ids, type, time_range):
return list(self.exec_stats_apps.find(filter, selection))
def aggr_exec_stats_table(self, minTime, maxTime):
-
+ self._ensure_mongo_connection()
# setup the query
aggParams = None
group = {
@@ -1225,7 +1350,7 @@ def aggr_exec_stats_table(self, minTime, maxTime):
return counts
def get_exec_raw_stats(self, minTime, maxTime):
-
+ self._ensure_mongo_connection()
filter = {}
creationTimeFilter = {}
if minTime is not None:
@@ -1240,6 +1365,7 @@ def get_exec_raw_stats(self, minTime, maxTime):
return list(self.exec_stats_raw.find(filter, {'_id': 0}))
def set_secure_config_params(self, data_list):
+ self._ensure_mongo_connection()
for param_data in data_list:
param_data['module_name_lc'] = param_data['module_name'].lower()
param_data['version'] = param_data.get('version', '')
@@ -1253,6 +1379,7 @@ def set_secure_config_params(self, data_list):
upsert=True)
def remove_secure_config_params(self, data_list):
+ self._ensure_mongo_connection()
for param_data in data_list:
param_data['module_name_lc'] = param_data['module_name'].lower()
param_data['version'] = param_data.get('version', '')
@@ -1264,6 +1391,7 @@ def remove_secure_config_params(self, data_list):
})
def get_secure_config_params(self, module_name):
+ self._ensure_mongo_connection()
selection = {"_id": 0, "module_name_lc": 0}
filter = {"module_name_lc": module_name.lower()}
return list(self.secure_config_params.find(filter, selection))
@@ -1272,27 +1400,28 @@ def get_secure_config_params(self, module_name):
# todo: add 'in-progress' flag so if something goes done during an update, or if
# another server is already starting an update, we can skip or abort
- def check_db_schema(self):
+ def check_db_schema(self, mongo_client):
+ db = mongo_client[self.mongo_db]
- db_version = self.get_db_version()
+ db_version = self.get_db_version(db)
print('db_version=' + str(db_version))
if db_version < 2:
print('Updating DB schema to V2...')
- self.update_db_1_to_2()
- self.update_db_version(2)
+ self.update_db_1_to_2(db)
+ self.update_db_version(2, db)
print('done.')
if db_version < 3:
print('Updating DB schema to V3...')
- self.update_db_2_to_3()
- self.update_db_version(3)
+ self.update_db_2_to_3(db)
+ self.update_db_version(3, db)
print('done.')
if db_version < 4:
print('Updating DB schema to V4...')
- self.update_db_3_to_4()
- self.update_db_version(4)
+ self.update_db_3_to_4(db)
+ self.update_db_version(4, db)
print('done.')
if db_version > 4:
@@ -1301,34 +1430,37 @@ def check_db_schema(self):
'Incompatible DB versions. Expecting DB V4, found DV V' + str(db_version) +
'. You are probably running an old version of the service. Start up failed.')
- def get_db_version(self):
- # version is a collection that should only have a single
- version_collection = self.db[MongoCatalogDBI._DB_VERSION]
+ def get_db_version(self, db):
+
+ # version is a collection that should only have a single document
+ version_collection = db[MongoCatalogDBI._DB_VERSION]
ver = version_collection.find_one({})
if (ver):
return ver['version']
else:
# if there is no version document, then we are DB v1
- self.update_db_version(1)
+ self.update_db_version(1, db)
return 1
- def update_db_version(self, version):
+ def update_db_version(self, version, db):
+
# make sure we can't have two version documents
- version_collection = self.db[MongoCatalogDBI._DB_VERSION]
+ version_collection = db[MongoCatalogDBI._DB_VERSION]
version_collection.create_index('version_doc', unique=True, sparse=False)
version_collection.update_one({'version_doc': True}, {'$set': {'version': version}},
upsert=True)
# version 1 kept released module versions in a map, version 2 updates that to a list
# and adds dynamic service tags
- def update_db_1_to_2(self):
- for m in self.modules.find({'release_versions': {'$exists': True}}):
+ def update_db_1_to_2(self, db):
+ modules_collection = db[MongoCatalogDBI._MODULES]
+ for m in modules_collection.find({'release_versions': {'$exists': True}}):
release_version_list = []
for timestamp in m['release_versions']:
m['release_versions'][timestamp]['dynamic_service'] = 0
release_version_list.append(m['release_versions'][timestamp])
- self.modules.update_one(
+ modules_collection.update_one(
{'_id': m['_id']},
{
'$unset': {'release_versions': ''},
@@ -1337,49 +1469,54 @@ def update_db_1_to_2(self):
# make sure everything has the dynamic service flag
if not 'dynamic_service' in m['info']:
- self.modules.update_one(
+ modules_collection.update_one(
{'_id': m['_id']},
{'$set': {'info.dynamic_service': 0}})
if m['current_versions']['release']:
if not 'dynamic_service' in m['current_versions']['release']:
- self.modules.update_one(
+ modules_collection.update_one(
{'_id': m['_id']},
{'$set': {'current_versions.release.dynamic_service': 0}})
if m['current_versions']['beta']:
if not 'dynamic_service' in m['current_versions']['beta']:
- self.modules.update_one(
+ modules_collection.update_one(
{'_id': m['_id']},
{'$set': {'current_versions.beta.dynamic_service': 0}})
if m['current_versions']['dev']:
if not 'dynamic_service' in m['current_versions']['dev']:
- self.modules.update_one(
+ modules_collection.update_one(
{'_id': m['_id']},
{'$set': {'current_versions.dev.dynamic_service': 0}})
# also ensure the execution stats fields have correct names
- self.exec_stats_apps.update_many({'avg_queue_time': {'$exists': True}},
+ exec_stats_apps_collection = db[MongoCatalogDBI._EXEC_STATS_APPS]
+ exec_stats_apps_collection.update_many({'avg_queue_time': {'$exists': True}},
{'$rename': {'avg_queue_time': 'total_queue_time',
'avg_exec_time': 'total_exec_time'}})
- self.exec_stats_users.update_many({'avg_queue_time': {'$exists': True}},
+
+ exec_stats_users_collection = db[MongoCatalogDBI._EXEC_STATS_USERS]
+ exec_stats_users_collection.update_many({'avg_queue_time': {'$exists': True}},
{'$rename': {'avg_queue_time': 'total_queue_time',
'avg_exec_time': 'total_exec_time'}})
# version 3 moves the module version information out of the module document into
- # a separate module versions collection.
- def update_db_2_to_3(self):
+ # a separate module versions collection.
+ def update_db_2_to_3(self, db):
- self.module_versions.create_index('module_name_lc', sparse=False)
- self.module_versions.create_index('git_commit_hash', sparse=False)
- self.module_versions.create_index([
+ module_versions_collection = db[MongoCatalogDBI._MODULE_VERSIONS]
+ module_versions_collection.create_index('module_name_lc', sparse=False)
+ module_versions_collection.create_index('git_commit_hash', sparse=False)
+ module_versions_collection.create_index([
('module_name_lc', ASCENDING),
('git_commit_hash', ASCENDING)],
unique=True, sparse=False)
+ modules_collection = db[MongoCatalogDBI._MODULES]
# update all module versions
- for m in self.modules.find({}):
+ for m in modules_collection.find({}):
# skip modules that have not been properly registered, might want to delete these later
if 'module_name' not in m or 'module_name_lc' not in m:
@@ -1395,14 +1532,14 @@ def update_db_2_to_3(self):
rVer['released'] = 1
self.prepare_version_doc_for_db_2_to_3_update(rVer, m)
try:
- self.module_versions.insert_one(rVer)
+ module_versions_collection.insert_one(rVer)
except:
print(' - Warning - ' + rVer['module_name'] + '.' + rVer[
'git_commit_hash'] + ' already inserted, skipping.')
new_release_version_list.append({
'git_commit_hash': rVer['git_commit_hash']
})
- self.modules.update_one(
+ modules_collection.update_one(
{'_id': m['_id']},
{'$set': {'release_version_list': new_release_version_list}}
)
@@ -1415,19 +1552,19 @@ def update_db_2_to_3(self):
self.prepare_version_doc_for_db_2_to_3_update(modVer, m)
if modVer.get('git_commit_hash') is not None:
try:
- self.module_versions.insert_one(modVer)
+ module_versions_collection.insert_one(modVer)
except Exception as e:
# we expect this to happen for all 'release' tags and if, say, a
# version still tagged as dev/beta has been released
print(f" - Warning - {tag} ver of {modVer['module_name']}."
f"{modVer['git_commit_hash']} already inserted, skipping.")
- self.modules.update_one(
+ modules_collection.update_one(
{'_id': m['_id']},
{'$set': {'current_versions.' + tag: {
'git_commit_hash': modVer['git_commit_hash']}}}
)
else:
- self.modules.update_one(
+ modules_collection.update_one(
{'_id': m['_id']},
{'$set': {'current_versions.' + tag: None}}
)
@@ -1452,23 +1589,26 @@ def prepare_version_doc_for_db_2_to_3_update(self, version, module):
version['release_timestamp'] = None
# version 4 performs a small modification to the client group structure and volume_mounts structure
- def update_db_3_to_4(self):
+ def update_db_3_to_4(self, db):
# make sure we don't have any indecies on the collections
- self.volume_mounts.drop_indexes()
- self.client_groups.drop_indexes()
+ volume_mounts_collection = db[MongoCatalogDBI._VOLUME_MOUNTS]
+ client_groups_collection = db[MongoCatalogDBI._CLIENT_GROUPS]
+
+ volume_mounts_collection.drop_indexes()
+ client_groups_collection.drop_indexes()
# update the volume_mounts, just need to rename app_id to function_name
- for vm in self.volume_mounts.find({}):
+ for vm in volume_mounts_collection.find({}):
if 'app_id' in vm and 'function_name' not in vm:
- self.volume_mounts.update_one(
+ volume_mounts_collection.update_one(
{'_id': vm['_id']},
{'$set': {'function_name': vm['app_id']}, '$unset': {'app_id': 1}}
)
- for cg in self.client_groups.find({}):
+ for cg in client_groups_collection.find({}):
if 'app_id' in cg:
- self.client_groups.delete_one({'_id': cg['_id']})
+ client_groups_collection.delete_one({'_id': cg['_id']})
tokens = cg['app_id'].split('/')
if len(tokens) != 2:
print(
@@ -1480,4 +1620,4 @@ def update_db_3_to_4(self):
'function_name': tokens[1],
'client_groups': cg['client_groups']
}
- self.client_groups.insert_one(new_cg)
+ client_groups_collection.insert_one(new_cg)
\ No newline at end of file
diff --git a/lib/biokbase/catalog/registrar.py b/lib/biokbase/catalog/registrar.py
index b10d158f..d8226cbc 100644
--- a/lib/biokbase/catalog/registrar.py
+++ b/lib/biokbase/catalog/registrar.py
@@ -252,7 +252,7 @@ def sanity_checks_and_parse(self, basedir, git_commit_hash):
with codecs.open(os.path.join(basedir, yaml_filename), 'r', "utf-8",
errors='ignore') as kb_yaml_file:
kb_yaml_string = kb_yaml_file.read()
- self.kb_yaml = yaml.load(kb_yaml_string)
+ self.kb_yaml = yaml.safe_load(kb_yaml_string)
self.log('=====kbase.yaml parse:')
self.log(pprint.pformat(self.kb_yaml))
self.log('=====end kbase.yaml')
diff --git a/lib/biokbase/catalog/version.py b/lib/biokbase/catalog/version.py
index ccef70f8..33363d57 100644
--- a/lib/biokbase/catalog/version.py
+++ b/lib/biokbase/catalog/version.py
@@ -1,2 +1,2 @@
# File that simply defines version information
-CATALOG_VERSION = '2.2.4'
+CATALOG_VERSION = '2.3.1'
diff --git a/lib/biokbase/log.py b/lib/biokbase/log.py
new file mode 100644
index 00000000..59748cea
--- /dev/null
+++ b/lib/biokbase/log.py
@@ -0,0 +1,371 @@
+"""
+NAME
+ log
+
+DESCRIPTION
+ A library for sending logging messages to syslog.
+
+METHODS
+ log(string subsystem, hashref constraints): Initializes log. You
+ should call this at the beginning of your program. Constraints are
+ optional.
+
+ log_message(int level, string message): sends log message to syslog.
+
+ * level: (0-9) The logging level for this message is compared to
+ the logging level that has been set in log. If it is <=
+ the set logging level, the message will be sent to syslog,
+ otherwise it will be ignored. Logging level is set to 6
+ if the control API cannot be reached and the user does
+ not set the log level. Log level can also be entered as
+ string (e.g. 'DEBUG')
+
+ * message: This is the log message.
+
+ get_log_level(): Returns the current log level as an integer.
+
+ set_log_level(integer level) : Sets the log level. Only use this if you
+ wish to override the log levels that are defined by the control API.
+ Can also be entered as string (e.g. 'DEBUG')
+
+ * level : priority
+
+ * 0 : EMERG - system is unusable
+
+ * 1 : ALERT - component must be fixed immediately
+
+ * 2 : CRIT - secondary component must be fixed immediately
+
+ * 3 : ERR - non-urgent failure
+
+ * 4 : WARNING - warning that an error will occur if no action
+ is taken
+
+ * 5 : NOTICE - unusual but safe conditions
+
+ * 6 : INFO - normal operational messages
+
+ * 7 : DEBUG - lowest level of debug
+
+ * 8 : DEBUG2 - second level of debug
+
+ * 9 : DEBUG3 - highest level of debug
+
+ set_log_msg_check_count(integer count): used to set the number of
+ messages that log will log before querying the control API for the
+ log level (default is 100 messages).
+
+ set_log_msg_check_interval(integer seconds): used to set the interval,
+ in seconds, that will be allowed to pass before log will query the
+ control API for the log level (default is 300 seconds).
+
+ update_api_log_level() : Checks the control API for the currently set
+ log level.
+
+ use_api_log_level() : Removes the user-defined log level and tells log
+ to use the control API-defined log level.
+"""
+
+# Copied from sample service, the log.py version in lib is python2 only
+# TODO LOGGING rework this to just log to stdout, rethink logging in general
+
+import json as _json
+import urllib.request as _urllib2
+import syslog as _syslog
+import platform as _platform
+import inspect as _inspect
+import os as _os
+import getpass as _getpass
+import warnings as _warnings
+from configparser import ConfigParser as _ConfigParser
+import time
+
+MLOG_ENV_FILE = 'MLOG_CONFIG_FILE'
+_GLOBAL = 'global'
+MLOG_LOG_LEVEL = 'mlog_log_level'
+MLOG_API_URL = 'mlog_api_url'
+MLOG_LOG_FILE = 'mlog_log_file'
+
+DEFAULT_LOG_LEVEL = 6
+#MSG_CHECK_COUNT = 100
+#MSG_CHECK_INTERVAL = 300 # 300s = 5min
+MSG_FACILITY = _syslog.LOG_LOCAL1
+EMERG_FACILITY = _syslog.LOG_LOCAL0
+
+EMERG = 0
+ALERT = 1
+CRIT = 2
+ERR = 3
+WARNING = 4
+NOTICE = 5
+INFO = 6
+DEBUG = 7
+DEBUG2 = 8
+DEBUG3 = 9
+_MLOG_TEXT_TO_LEVEL = {'EMERG': EMERG,
+ 'ALERT': ALERT,
+ 'CRIT': CRIT,
+ 'ERR': ERR,
+ 'WARNING': WARNING,
+ 'NOTICE': NOTICE,
+ 'INFO': INFO,
+ 'DEBUG': DEBUG,
+ 'DEBUG2': DEBUG2,
+ 'DEBUG3': DEBUG3,
+ }
+_MLOG_TO_SYSLOG = [_syslog.LOG_EMERG, _syslog.LOG_ALERT, _syslog.LOG_CRIT,
+ _syslog.LOG_ERR, _syslog.LOG_WARNING, _syslog.LOG_NOTICE,
+ _syslog.LOG_INFO, _syslog.LOG_DEBUG, _syslog.LOG_DEBUG,
+ _syslog.LOG_DEBUG]
+#ALLOWED_LOG_LEVELS = set(_MLOG_TEXT_TO_LEVEL.values())
+_MLOG_LEVEL_TO_TEXT = {}
+for k, v in _MLOG_TEXT_TO_LEVEL.items():
+ _MLOG_LEVEL_TO_TEXT[v] = k
+LOG_LEVEL_MIN = min(_MLOG_LEVEL_TO_TEXT.keys())
+LOG_LEVEL_MAX = max(_MLOG_LEVEL_TO_TEXT.keys())
+del k, v
+
+
+class log(object):
+ """
+ This class contains the methods necessary for sending log messages.
+ """
+
+ def __init__(self, subsystem, constraints=None, config=None, logfile=None,
+ ip_address=False, authuser=False, module=False,
+ method=False, call_id=False, changecallback=None):
+ if not subsystem:
+ raise ValueError("Subsystem must be supplied")
+
+ self.user = _getpass.getuser()
+ self.parentfile = _os.path.abspath(_inspect.getfile(
+ _inspect.stack()[1][0]))
+ self.ip_address = ip_address
+ self.authuser = authuser
+ self.module = module
+ self.method = method
+ self.call_id = call_id
+ noop = lambda: None
+ self._callback = changecallback or noop
+ self._subsystem = str(subsystem)
+ self._mlog_config_file = config
+ if not self._mlog_config_file:
+ self._mlog_config_file = _os.environ.get(MLOG_ENV_FILE, None)
+ if self._mlog_config_file:
+ self._mlog_config_file = str(self._mlog_config_file)
+ self._user_log_level = -1
+ self._config_log_level = -1
+ self._user_log_file = logfile
+ self._config_log_file = None
+ self._api_log_level = -1
+ self._msgs_since_config_update = 0
+ self._time_at_config_update = time.time()
+ self.msg_count = 0
+ self._recheck_api_msg = 100
+ self._recheck_api_time = 300 # 5 mins
+ self._log_constraints = {} if not constraints else constraints
+
+ self._init = True
+ self.update_config()
+ self._init = False
+
+ def _get_time_since_start(self):
+ time_diff = time.time() - self._time_at_config_update
+ return time_diff
+
+ def get_log_level(self):
+ if(self._user_log_level != -1):
+ return self._user_log_level
+ elif(self._config_log_level != -1):
+ return self._config_log_level
+ elif(self._api_log_level != -1):
+ return self._api_log_level
+ else:
+ return DEFAULT_LOG_LEVEL
+
+ def _get_config_items(self, cfg, section):
+ cfgitems = {}
+ if cfg.has_section(section):
+ for k, v in cfg.items(section):
+ cfgitems[k] = v
+ return cfgitems
+
+ def update_config(self):
+ loglevel = self.get_log_level()
+ logfile = self.get_log_file()
+
+ self._api_log_level = -1
+ self._msgs_since_config_update = 0
+ self._time_at_config_update = time.time()
+
+ # Retrieving the control API defined log level
+ api_url = None
+ if self._mlog_config_file and _os.path.isfile(self._mlog_config_file):
+ cfg = _ConfigParser()
+ cfg.read(self._mlog_config_file)
+ cfgitems = self._get_config_items(cfg, _GLOBAL)
+ cfgitems.update(self._get_config_items(cfg, self._subsystem))
+ if MLOG_LOG_LEVEL in cfgitems:
+ try:
+ self._config_log_level = int(cfgitems[MLOG_LOG_LEVEL])
+ except:
+ _warnings.warn(
+ 'Cannot parse log level {} from file {} to int'.format(
+ cfgitems[MLOG_LOG_LEVEL], self._mlog_config_file)
+ + '. Keeping current log level.')
+ if MLOG_API_URL in cfgitems:
+ api_url = cfgitems[MLOG_API_URL]
+ if MLOG_LOG_FILE in cfgitems:
+ self._config_log_file = cfgitems[MLOG_LOG_FILE]
+ elif self._mlog_config_file:
+ _warnings.warn('Cannot read config file ' + self._mlog_config_file)
+
+ if (api_url):
+ subsystem_api_url = api_url + "/" + self._subsystem
+ try:
+ data = _json.load(_urllib2.urlopen(subsystem_api_url,
+ timeout=5))
+ except _urllib2.URLError as e:
+ code_ = None
+ if hasattr(e, 'code'):
+ code_ = ' ' + str(e.code)
+ _warnings.warn(
+ 'Could not connect to mlog api server at ' +
+ '{}:{} {}. Using default log level {}.'.format(
+ subsystem_api_url, code_, str(e.reason),
+ str(DEFAULT_LOG_LEVEL)))
+ else:
+ max_matching_level = -1
+ for constraint_set in data['log_levels']:
+ level = constraint_set['level']
+ constraints = constraint_set['constraints']
+ if level <= max_matching_level:
+ continue
+
+ matches = 1
+ for constraint in constraints:
+ if constraint not in self._log_constraints:
+ matches = 0
+ elif (self._log_constraints[constraint] !=
+ constraints[constraint]):
+ matches = 0
+
+ if matches == 1:
+ max_matching_level = level
+
+ self._api_log_level = max_matching_level
+ if ((self.get_log_level() != loglevel or
+ self.get_log_file() != logfile) and not self._init):
+ self._callback()
+
+ def _resolve_log_level(self, level):
+ if(level in _MLOG_TEXT_TO_LEVEL):
+ level = _MLOG_TEXT_TO_LEVEL[level]
+ elif(level not in _MLOG_LEVEL_TO_TEXT):
+ raise ValueError('Illegal log level')
+ return level
+
+ def set_log_level(self, level):
+ self._user_log_level = self._resolve_log_level(level)
+ self._callback()
+
+ def get_log_file(self):
+ if self._user_log_file:
+ return self._user_log_file
+ if self._config_log_file:
+ return self._config_log_file
+ return None
+
+ def set_log_file(self, filename):
+ self._user_log_file = filename
+ self._callback()
+
+ def set_log_msg_check_count(self, count):
+ count = int(count)
+ if count < 0:
+ raise ValueError('Cannot check a negative number of messages')
+ self._recheck_api_msg = count
+
+ def set_log_msg_check_interval(self, interval):
+ interval = int(interval)
+ if interval < 0:
+ raise ValueError('interval must be positive')
+ self._recheck_api_time = interval
+
+ def clear_user_log_level(self):
+ self._user_log_level = -1
+ self._callback()
+
+ def _get_ident(self, level, user, parentfile, ip_address, authuser, module,
+ method, call_id):
+ infos = [self._subsystem, _MLOG_LEVEL_TO_TEXT[level],
+ repr(time.time()), user, parentfile, str(_os.getpid())]
+ if self.ip_address:
+ infos.append(str(ip_address) if ip_address else '-')
+ if self.authuser:
+ infos.append(str(authuser) if authuser else '-')
+ if self.module:
+ infos.append(str(module) if module else '-')
+ if self.method:
+ infos.append(str(method) if method else '-')
+ if self.call_id:
+ infos.append(str(call_id) if call_id else '-')
+ return "[" + "] [".join(infos) + "]"
+
+ def _syslog(self, facility, level, ident, message):
+ _syslog.openlog(ident, facility)
+ if isinstance(message, str):
+ _syslog.syslog(_MLOG_TO_SYSLOG[level], message)
+ else:
+ try:
+ for m in message:
+ _syslog.syslog(_MLOG_TO_SYSLOG[level], m)
+ except TypeError:
+ _syslog.syslog(_MLOG_TO_SYSLOG[level], str(message))
+ _syslog.closelog()
+
+ def _log(self, ident, message):
+ ident = ' '.join([str(time.strftime(
+ "%Y-%m-%d %H:%M:%S", time.localtime())),
+ _platform.node(), ident + ': '])
+ try:
+ with open(self.get_log_file(), 'a') as log:
+ if isinstance(message, str):
+ log.write(ident + message + '\n')
+ else:
+ try:
+ for m in message:
+ log.write(ident + m + '\n')
+ except TypeError:
+ log.write(ident + str(message) + '\n')
+ except Exception as e:
+ err = 'Could not write to log file ' + str(self.get_log_file()) + \
+ ': ' + str(e) + '.'
+ _warnings.warn(err)
+
+ def log_message(self, level, message, ip_address=None, authuser=None,
+ module=None, method=None, call_id=None):
+# message = str(message)
+ level = self._resolve_log_level(level)
+
+ self.msg_count += 1
+ self._msgs_since_config_update += 1
+
+ if(self._msgs_since_config_update >= self._recheck_api_msg
+ or self._get_time_since_start() >= self._recheck_api_time):
+ self.update_config()
+
+ ident = self._get_ident(level, self.user, self.parentfile, ip_address,
+ authuser, module, method, call_id)
+ # If this message is an emergency, send a copy to the emergency
+ # facility first.
+ if(level == 0):
+ self._syslog(EMERG_FACILITY, level, ident, message)
+
+ if(level <= self.get_log_level()):
+ self._syslog(MSG_FACILITY, level, ident, message)
+ if self.get_log_file():
+ self._log(ident, message)
+
+if __name__ == '__main__':
+ pass
diff --git a/lib/java/us/kbase/catalog/CurrentRepoParams.java b/lib/java/us/kbase/catalog/CurrentRepoParams.java
deleted file mode 100644
index f433d68e..00000000
--- a/lib/java/us/kbase/catalog/CurrentRepoParams.java
+++ /dev/null
@@ -1,82 +0,0 @@
-
-package us.kbase.catalog;
-
-import java.util.HashMap;
-import java.util.Map;
-import javax.annotation.Generated;
-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonPropertyOrder;
-
-
-/**
- *
- * Original spec-file type: CurrentRepoParams
- *
- * Describes how to find repository details.
- * module_name - name of module defined in kbase.yaml file;
- * with_disabled - optional flag adding disabled repos (default value is false).
- *
- * Describes how to find repository details (including old versions). In case neither of
- * version and git_commit_hash is specified last version is returned.
- * module_name - name of module defined in kbase.yaml file;
- * timestamp - optional parameter limiting search by certain version timestamp;
- * git_commit_hash - optional parameter limiting search by certain git commit hash;
- * with_disabled - optional flag adding disabled repos (default value is false).
- *
- * method_ids - list of method ids (each id is fully qualified, i.e. contains module
- * name prefix followed by slash);
- * widget_ids - list of widget ids (each id is name of JavaScript file stored in
- * repo's 'ui/widgets' folder).
- *
- * Describes how to find module/repository details.
- * module_name - name of module defined in kbase.yaml file;
- * git_url - the url used to register the module
- * include_disabled - optional flag, set to true to include disabled repos
- *
- * Describes how to find repository details.
- * module_name - name of module defined in kbase.yaml file;
- * multiple state fields? (approvalState, buildState, versionState)
- * state - one of 'pending', 'ready', 'building', 'testing', 'disabled'.
- *
- *
- */
-@JsonInclude(JsonInclude.Include.NON_NULL)
-@Generated("com.googlecode.jsonschema2pojo")
-@JsonPropertyOrder({
- "module_name",
- "github_repo",
- "registration_state",
- "error_message"
-})
-public class SetRepoStateParams {
-
- @JsonProperty("module_name")
- private String moduleName;
- @JsonProperty("github_repo")
- private String githubRepo;
- @JsonProperty("registration_state")
- private String registrationState;
- @JsonProperty("error_message")
- private String errorMessage;
- private Map additionalProperties = new HashMap();
-
- @JsonProperty("module_name")
- public String getModuleName() {
- return moduleName;
- }
-
- @JsonProperty("module_name")
- public void setModuleName(String moduleName) {
- this.moduleName = moduleName;
- }
-
- public SetRepoStateParams withModuleName(String moduleName) {
- this.moduleName = moduleName;
- return this;
- }
-
- @JsonProperty("github_repo")
- public String getGithubRepo() {
- return githubRepo;
- }
-
- @JsonProperty("github_repo")
- public void setGithubRepo(String githubRepo) {
- this.githubRepo = githubRepo;
- }
-
- public SetRepoStateParams withGithubRepo(String githubRepo) {
- this.githubRepo = githubRepo;
- return this;
- }
-
- @JsonProperty("registration_state")
- public String getRegistrationState() {
- return registrationState;
- }
-
- @JsonProperty("registration_state")
- public void setRegistrationState(String registrationState) {
- this.registrationState = registrationState;
- }
-
- public SetRepoStateParams withRegistrationState(String registrationState) {
- this.registrationState = registrationState;
- return this;
- }
-
- @JsonProperty("error_message")
- public String getErrorMessage() {
- return errorMessage;
- }
-
- @JsonProperty("error_message")
- public void setErrorMessage(String errorMessage) {
- this.errorMessage = errorMessage;
- }
-
- public SetRepoStateParams withErrorMessage(String errorMessage) {
- this.errorMessage = errorMessage;
- return this;
- }
-
- @JsonAnyGetter
- public Map getAdditionalProperties() {
- return this.additionalProperties;
- }
-
- @JsonAnySetter
- public void setAdditionalProperties(String name, Object value) {
- this.additionalProperties.put(name, value);
- }
-
- @Override
- public String toString() {
- return ((((((((((("SetRepoStateParams"+" [moduleName=")+ moduleName)+", githubRepo=")+ githubRepo)+", registrationState=")+ registrationState)+", errorMessage=")+ errorMessage)+", additionalProperties=")+ additionalProperties)+"]");
- }
-
-}
diff --git a/narrative_method_store b/narrative_method_store
deleted file mode 160000
index fb85ae8b..00000000
--- a/narrative_method_store
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit fb85ae8b6bea39c4834676e7cfbd2596c7bb182a
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index e3190618..00000000
--- a/requirements.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-pymongo==3.7
-docker>=3.5
-gitpython
-pyyaml
-semantic_version
-coverage
\ No newline at end of file
diff --git a/settings.gradle b/settings.gradle
new file mode 100644
index 00000000..1d01a90a
--- /dev/null
+++ b/settings.gradle
@@ -0,0 +1,8 @@
+/*
+ * This file was generated by the Gradle 'init' task.
+ *
+ * The settings file is used to specify which projects to include in your build.
+ * For more detailed information on multi-project builds, please refer to https://docs.gradle.org/8.14.3/userguide/multi_project_builds.html in the Gradle documentation.
+ */
+
+rootProject.name = 'catalog'
diff --git a/lib/java/us/kbase/catalog/AppClientGroup.java b/src/main/java/us/kbase/catalog/AppClientGroup.java
similarity index 100%
rename from lib/java/us/kbase/catalog/AppClientGroup.java
rename to src/main/java/us/kbase/catalog/AppClientGroup.java
diff --git a/lib/java/us/kbase/catalog/BasicModuleInfo.java b/src/main/java/us/kbase/catalog/BasicModuleInfo.java
similarity index 100%
rename from lib/java/us/kbase/catalog/BasicModuleInfo.java
rename to src/main/java/us/kbase/catalog/BasicModuleInfo.java
diff --git a/lib/java/us/kbase/catalog/BasicModuleVersionInfo.java b/src/main/java/us/kbase/catalog/BasicModuleVersionInfo.java
similarity index 100%
rename from lib/java/us/kbase/catalog/BasicModuleVersionInfo.java
rename to src/main/java/us/kbase/catalog/BasicModuleVersionInfo.java
diff --git a/lib/java/us/kbase/catalog/BuildInfo.java b/src/main/java/us/kbase/catalog/BuildInfo.java
similarity index 100%
rename from lib/java/us/kbase/catalog/BuildInfo.java
rename to src/main/java/us/kbase/catalog/BuildInfo.java
diff --git a/lib/java/us/kbase/catalog/BuildLog.java b/src/main/java/us/kbase/catalog/BuildLog.java
similarity index 100%
rename from lib/java/us/kbase/catalog/BuildLog.java
rename to src/main/java/us/kbase/catalog/BuildLog.java
diff --git a/lib/java/us/kbase/catalog/BuildLogLine.java b/src/main/java/us/kbase/catalog/BuildLogLine.java
similarity index 100%
rename from lib/java/us/kbase/catalog/BuildLogLine.java
rename to src/main/java/us/kbase/catalog/BuildLogLine.java
diff --git a/lib/java/us/kbase/catalog/CatalogClient.java b/src/main/java/us/kbase/catalog/CatalogClient.java
similarity index 99%
rename from lib/java/us/kbase/catalog/CatalogClient.java
rename to src/main/java/us/kbase/catalog/CatalogClient.java
index a5e6505e..8b44ec37 100644
--- a/lib/java/us/kbase/catalog/CatalogClient.java
+++ b/src/main/java/us/kbase/catalog/CatalogClient.java
@@ -101,6 +101,7 @@ public boolean isInsecureHttpConnectionAllowed() {
/** Deprecated. Use isInsecureHttpConnectionAllowed().
* @deprecated
*/
+ @Deprecated
public boolean isAuthAllowedForHttp() {
return caller.isAuthAllowedForHttp();
}
@@ -116,6 +117,7 @@ public void setIsInsecureHttpConnectionAllowed(boolean allowed) {
/** Deprecated. Use setIsInsecureHttpConnectionAllowed().
* @deprecated
*/
+ @Deprecated
public void setAuthAllowedForHttp(boolean isAuthAllowedForHttp) {
caller.setAuthAllowedForHttp(isAuthAllowedForHttp);
}
diff --git a/lib/java/us/kbase/catalog/ClientGroupConfig.java b/src/main/java/us/kbase/catalog/ClientGroupConfig.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ClientGroupConfig.java
rename to src/main/java/us/kbase/catalog/ClientGroupConfig.java
diff --git a/lib/java/us/kbase/catalog/ClientGroupFilter.java b/src/main/java/us/kbase/catalog/ClientGroupFilter.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ClientGroupFilter.java
rename to src/main/java/us/kbase/catalog/ClientGroupFilter.java
diff --git a/lib/java/us/kbase/catalog/CompilationReport.java b/src/main/java/us/kbase/catalog/CompilationReport.java
similarity index 100%
rename from lib/java/us/kbase/catalog/CompilationReport.java
rename to src/main/java/us/kbase/catalog/CompilationReport.java
diff --git a/lib/java/us/kbase/catalog/ExecAggrStats.java b/src/main/java/us/kbase/catalog/ExecAggrStats.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ExecAggrStats.java
rename to src/main/java/us/kbase/catalog/ExecAggrStats.java
diff --git a/lib/java/us/kbase/catalog/ExecAggrTableParams.java b/src/main/java/us/kbase/catalog/ExecAggrTableParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ExecAggrTableParams.java
rename to src/main/java/us/kbase/catalog/ExecAggrTableParams.java
diff --git a/lib/java/us/kbase/catalog/FavoriteCount.java b/src/main/java/us/kbase/catalog/FavoriteCount.java
similarity index 100%
rename from lib/java/us/kbase/catalog/FavoriteCount.java
rename to src/main/java/us/kbase/catalog/FavoriteCount.java
diff --git a/lib/java/us/kbase/catalog/FavoriteItem.java b/src/main/java/us/kbase/catalog/FavoriteItem.java
similarity index 100%
rename from lib/java/us/kbase/catalog/FavoriteItem.java
rename to src/main/java/us/kbase/catalog/FavoriteItem.java
diff --git a/lib/java/us/kbase/catalog/FavoriteUser.java b/src/main/java/us/kbase/catalog/FavoriteUser.java
similarity index 100%
rename from lib/java/us/kbase/catalog/FavoriteUser.java
rename to src/main/java/us/kbase/catalog/FavoriteUser.java
diff --git a/lib/java/us/kbase/catalog/Function.java b/src/main/java/us/kbase/catalog/Function.java
similarity index 100%
rename from lib/java/us/kbase/catalog/Function.java
rename to src/main/java/us/kbase/catalog/Function.java
diff --git a/lib/java/us/kbase/catalog/FunctionPlace.java b/src/main/java/us/kbase/catalog/FunctionPlace.java
similarity index 100%
rename from lib/java/us/kbase/catalog/FunctionPlace.java
rename to src/main/java/us/kbase/catalog/FunctionPlace.java
diff --git a/lib/java/us/kbase/catalog/GetBuildLogParams.java b/src/main/java/us/kbase/catalog/GetBuildLogParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/GetBuildLogParams.java
rename to src/main/java/us/kbase/catalog/GetBuildLogParams.java
diff --git a/lib/java/us/kbase/catalog/GetClientGroupParams.java b/src/main/java/us/kbase/catalog/GetClientGroupParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/GetClientGroupParams.java
rename to src/main/java/us/kbase/catalog/GetClientGroupParams.java
diff --git a/lib/java/us/kbase/catalog/GetExecAggrStatsParams.java b/src/main/java/us/kbase/catalog/GetExecAggrStatsParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/GetExecAggrStatsParams.java
rename to src/main/java/us/kbase/catalog/GetExecAggrStatsParams.java
diff --git a/lib/java/us/kbase/catalog/GetExecRawStatsParams.java b/src/main/java/us/kbase/catalog/GetExecRawStatsParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/GetExecRawStatsParams.java
rename to src/main/java/us/kbase/catalog/GetExecRawStatsParams.java
diff --git a/lib/java/us/kbase/catalog/GetLocalFunctionDetails.java b/src/main/java/us/kbase/catalog/GetLocalFunctionDetails.java
similarity index 100%
rename from lib/java/us/kbase/catalog/GetLocalFunctionDetails.java
rename to src/main/java/us/kbase/catalog/GetLocalFunctionDetails.java
diff --git a/lib/java/us/kbase/catalog/GetSecureConfigParamsInput.java b/src/main/java/us/kbase/catalog/GetSecureConfigParamsInput.java
similarity index 100%
rename from lib/java/us/kbase/catalog/GetSecureConfigParamsInput.java
rename to src/main/java/us/kbase/catalog/GetSecureConfigParamsInput.java
diff --git a/lib/java/us/kbase/catalog/IOTags.java b/src/main/java/us/kbase/catalog/IOTags.java
similarity index 100%
rename from lib/java/us/kbase/catalog/IOTags.java
rename to src/main/java/us/kbase/catalog/IOTags.java
diff --git a/lib/java/us/kbase/catalog/ListBuildParams.java b/src/main/java/us/kbase/catalog/ListBuildParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ListBuildParams.java
rename to src/main/java/us/kbase/catalog/ListBuildParams.java
diff --git a/lib/java/us/kbase/catalog/ListFavoriteCounts.java b/src/main/java/us/kbase/catalog/ListFavoriteCounts.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ListFavoriteCounts.java
rename to src/main/java/us/kbase/catalog/ListFavoriteCounts.java
diff --git a/lib/java/us/kbase/catalog/ListLocalFunctionParams.java b/src/main/java/us/kbase/catalog/ListLocalFunctionParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ListLocalFunctionParams.java
rename to src/main/java/us/kbase/catalog/ListLocalFunctionParams.java
diff --git a/lib/java/us/kbase/catalog/ListModuleParams.java b/src/main/java/us/kbase/catalog/ListModuleParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ListModuleParams.java
rename to src/main/java/us/kbase/catalog/ListModuleParams.java
diff --git a/lib/java/us/kbase/catalog/ListServiceModuleParams.java b/src/main/java/us/kbase/catalog/ListServiceModuleParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ListServiceModuleParams.java
rename to src/main/java/us/kbase/catalog/ListServiceModuleParams.java
diff --git a/lib/java/us/kbase/catalog/LocalFunctionDetails.java b/src/main/java/us/kbase/catalog/LocalFunctionDetails.java
similarity index 100%
rename from lib/java/us/kbase/catalog/LocalFunctionDetails.java
rename to src/main/java/us/kbase/catalog/LocalFunctionDetails.java
diff --git a/lib/java/us/kbase/catalog/LocalFunctionInfo.java b/src/main/java/us/kbase/catalog/LocalFunctionInfo.java
similarity index 100%
rename from lib/java/us/kbase/catalog/LocalFunctionInfo.java
rename to src/main/java/us/kbase/catalog/LocalFunctionInfo.java
diff --git a/lib/java/us/kbase/catalog/LocalFunctionTags.java b/src/main/java/us/kbase/catalog/LocalFunctionTags.java
similarity index 100%
rename from lib/java/us/kbase/catalog/LocalFunctionTags.java
rename to src/main/java/us/kbase/catalog/LocalFunctionTags.java
diff --git a/lib/java/us/kbase/catalog/LogExecStatsParams.java b/src/main/java/us/kbase/catalog/LogExecStatsParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/LogExecStatsParams.java
rename to src/main/java/us/kbase/catalog/LogExecStatsParams.java
diff --git a/lib/java/us/kbase/catalog/ModifySecureConfigParamsInput.java b/src/main/java/us/kbase/catalog/ModifySecureConfigParamsInput.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ModifySecureConfigParamsInput.java
rename to src/main/java/us/kbase/catalog/ModifySecureConfigParamsInput.java
diff --git a/lib/java/us/kbase/catalog/ModuleInfo.java b/src/main/java/us/kbase/catalog/ModuleInfo.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ModuleInfo.java
rename to src/main/java/us/kbase/catalog/ModuleInfo.java
diff --git a/lib/java/us/kbase/catalog/ModuleState.java b/src/main/java/us/kbase/catalog/ModuleState.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ModuleState.java
rename to src/main/java/us/kbase/catalog/ModuleState.java
diff --git a/lib/java/us/kbase/catalog/ModuleVersion.java b/src/main/java/us/kbase/catalog/ModuleVersion.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ModuleVersion.java
rename to src/main/java/us/kbase/catalog/ModuleVersion.java
diff --git a/lib/java/us/kbase/catalog/ModuleVersionInfo.java b/src/main/java/us/kbase/catalog/ModuleVersionInfo.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ModuleVersionInfo.java
rename to src/main/java/us/kbase/catalog/ModuleVersionInfo.java
diff --git a/lib/java/us/kbase/catalog/ModuleVersionLookupParams.java b/src/main/java/us/kbase/catalog/ModuleVersionLookupParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ModuleVersionLookupParams.java
rename to src/main/java/us/kbase/catalog/ModuleVersionLookupParams.java
diff --git a/lib/java/us/kbase/catalog/Parameter.java b/src/main/java/us/kbase/catalog/Parameter.java
similarity index 100%
rename from lib/java/us/kbase/catalog/Parameter.java
rename to src/main/java/us/kbase/catalog/Parameter.java
diff --git a/lib/java/us/kbase/catalog/RegisterRepoParams.java b/src/main/java/us/kbase/catalog/RegisterRepoParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/RegisterRepoParams.java
rename to src/main/java/us/kbase/catalog/RegisterRepoParams.java
diff --git a/lib/java/us/kbase/catalog/ReleaseReview.java b/src/main/java/us/kbase/catalog/ReleaseReview.java
similarity index 100%
rename from lib/java/us/kbase/catalog/ReleaseReview.java
rename to src/main/java/us/kbase/catalog/ReleaseReview.java
diff --git a/lib/java/us/kbase/catalog/RequestedReleaseInfo.java b/src/main/java/us/kbase/catalog/RequestedReleaseInfo.java
similarity index 100%
rename from lib/java/us/kbase/catalog/RequestedReleaseInfo.java
rename to src/main/java/us/kbase/catalog/RequestedReleaseInfo.java
diff --git a/lib/java/us/kbase/catalog/SecureConfigParameter.java b/src/main/java/us/kbase/catalog/SecureConfigParameter.java
similarity index 100%
rename from lib/java/us/kbase/catalog/SecureConfigParameter.java
rename to src/main/java/us/kbase/catalog/SecureConfigParameter.java
diff --git a/lib/java/us/kbase/catalog/SelectModuleVersion.java b/src/main/java/us/kbase/catalog/SelectModuleVersion.java
similarity index 100%
rename from lib/java/us/kbase/catalog/SelectModuleVersion.java
rename to src/main/java/us/kbase/catalog/SelectModuleVersion.java
diff --git a/lib/java/us/kbase/catalog/SelectModuleVersionParams.java b/src/main/java/us/kbase/catalog/SelectModuleVersionParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/SelectModuleVersionParams.java
rename to src/main/java/us/kbase/catalog/SelectModuleVersionParams.java
diff --git a/lib/java/us/kbase/catalog/SelectOneLocalFunction.java b/src/main/java/us/kbase/catalog/SelectOneLocalFunction.java
similarity index 100%
rename from lib/java/us/kbase/catalog/SelectOneLocalFunction.java
rename to src/main/java/us/kbase/catalog/SelectOneLocalFunction.java
diff --git a/lib/java/us/kbase/catalog/SelectOneModuleParams.java b/src/main/java/us/kbase/catalog/SelectOneModuleParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/SelectOneModuleParams.java
rename to src/main/java/us/kbase/catalog/SelectOneModuleParams.java
diff --git a/lib/java/us/kbase/catalog/SetRegistrationStateParams.java b/src/main/java/us/kbase/catalog/SetRegistrationStateParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/SetRegistrationStateParams.java
rename to src/main/java/us/kbase/catalog/SetRegistrationStateParams.java
diff --git a/lib/java/us/kbase/catalog/SpecFile.java b/src/main/java/us/kbase/catalog/SpecFile.java
similarity index 100%
rename from lib/java/us/kbase/catalog/SpecFile.java
rename to src/main/java/us/kbase/catalog/SpecFile.java
diff --git a/lib/java/us/kbase/catalog/UpdateGitUrlParams.java b/src/main/java/us/kbase/catalog/UpdateGitUrlParams.java
similarity index 100%
rename from lib/java/us/kbase/catalog/UpdateGitUrlParams.java
rename to src/main/java/us/kbase/catalog/UpdateGitUrlParams.java
diff --git a/lib/java/us/kbase/catalog/VersionCommitInfo.java b/src/main/java/us/kbase/catalog/VersionCommitInfo.java
similarity index 100%
rename from lib/java/us/kbase/catalog/VersionCommitInfo.java
rename to src/main/java/us/kbase/catalog/VersionCommitInfo.java
diff --git a/lib/java/us/kbase/catalog/VolumeMount.java b/src/main/java/us/kbase/catalog/VolumeMount.java
similarity index 100%
rename from lib/java/us/kbase/catalog/VolumeMount.java
rename to src/main/java/us/kbase/catalog/VolumeMount.java
diff --git a/lib/java/us/kbase/catalog/VolumeMountConfig.java b/src/main/java/us/kbase/catalog/VolumeMountConfig.java
similarity index 100%
rename from lib/java/us/kbase/catalog/VolumeMountConfig.java
rename to src/main/java/us/kbase/catalog/VolumeMountConfig.java
diff --git a/lib/java/us/kbase/catalog/VolumeMountFilter.java b/src/main/java/us/kbase/catalog/VolumeMountFilter.java
similarity index 100%
rename from lib/java/us/kbase/catalog/VolumeMountFilter.java
rename to src/main/java/us/kbase/catalog/VolumeMountFilter.java
diff --git a/test/catalog_config_test.py b/test/catalog_config_test.py
new file mode 100644
index 00000000..79a754f2
--- /dev/null
+++ b/test/catalog_config_test.py
@@ -0,0 +1,30 @@
+import unittest
+
+from biokbase.catalog.Impl import Catalog
+from catalog_test_util import CatalogTestUtil
+
+_RETRY_WRITES = "mongodb-retrywrites"
+
+
+class CatalogConfigTest(unittest.TestCase):
+
+ def test_catalog_without_retryWrites(self):
+ self.catalog_cfg.pop(_RETRY_WRITES, None)
+ catalog = Catalog(self.catalog_cfg)
+ self.assertFalse(catalog.cc.db.mongo_retry_writes)
+
+ def test_catalog_with_retryWrites_is_true(self):
+ self.catalog_cfg[_RETRY_WRITES] = "true"
+ catalog = Catalog(self.catalog_cfg)
+ self.assertTrue(catalog.cc.db.mongo_retry_writes)
+
+ @classmethod
+ def setUpClass(cls):
+ print("++++++++++++ RUNNING catalog_config_test.py +++++++++++")
+ cls.cUtil = CatalogTestUtil(".") # TODO: pass in test directory from outside
+ cls.cUtil.setUp()
+ cls.catalog_cfg = cls.cUtil.getCatalogConfig()
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.cUtil.tearDown()
diff --git a/test/catalog_test_util.py b/test/catalog_test_util.py
index ce2a72e1..20b722fa 100644
--- a/test/catalog_test_util.py
+++ b/test/catalog_test_util.py
@@ -42,11 +42,8 @@ def _setup_config(self):
config = ConfigParser()
config.read(os.path.join(self.test_dir, 'test.cfg'))
self.test_cfg = {}
- self.nms_test_cfg = {}
for entry in config.items('catalog-test'):
self.test_cfg[entry[0]] = entry[1]
- for entry in config.items('NarrativeMethodStore'):
- self.nms_test_cfg[entry[0]] = entry[1]
self.log('test.cfg parse\n' + pformat(self.test_cfg))
# passwords not needed in tests yet
@@ -230,7 +227,10 @@ def tearDown(self):
self.dockerclient.remove_image(image['Id'])
# make sure NMS is clean after each test
- self.mongo.drop_database(self.nms_test_cfg['method-spec-mongo-dbname'])
+ # Drop the database set by the method_spec_mongo_dbname environment variable
+ # in the docker-compose_nms.yml file.
+ nms_mongo_client = MongoClient('mongodb://localhost:27018')
+ nms_mongo_client.drop_database('method_store_repo_db')
def log(self, mssg):
# uncomment to debug test rig- warning: on travis this may print any passwords in your config
diff --git a/test/core_registration_test.py b/test/core_registration_test.py
index e222bb84..c1e63c72 100644
--- a/test/core_registration_test.py
+++ b/test/core_registration_test.py
@@ -37,6 +37,7 @@ def test_full_module_lifecycle(self):
self.assertEqual(state['registration'], 'complete')
# (3) check the log
+ sleep(3) # sleep to make sure the catalog db gets the final log messages
parsed_log = self.catalog.get_parsed_build_log(self.cUtil.anonymous_ctx(),
{'registration_id': registration_id})[0]
self.assertEqual(parsed_log['registration'], 'complete')
diff --git a/test/docker-compose_nms.yml b/test/docker-compose_nms.yml
new file mode 100644
index 00000000..658b1343
--- /dev/null
+++ b/test/docker-compose_nms.yml
@@ -0,0 +1,50 @@
+version: '3.4'
+
+services:
+ nms:
+ image: ghcr.io/kbase/narrative_method_store:v0.3.12
+ platform: linux/amd64
+ ports:
+ - "7125:7125"
+ depends_on: ["mongo"]
+ environment:
+ # see deployment/conf/.templates for more environment variables
+ - method_spec_git_repo=https://github.com/kbase/narrative_method_specs_ci
+ - method_spec_git_repo_branch=master
+ - method_spec_git_repo_local_dir=narrative_method_specs_recreated_at_startup
+ - method_spec_git_repo_refresh_rate=2
+ - method_spec_cache_size=5000
+ - method_spec_temp_dir=narrative_method_store_temp
+ - method_spec_mongo_host=mongo:27017
+ - method_spec_mongo_dbname=method_store_repo_db
+ - method_spec_admin_users=${ADMIN_USER}
+ # For local testing, you can comment the above line and
+ # set method_spec_admin_users directly to your kbase_id, as shown in the example below:
+ # - method_spec_admin_users=sijiex
+ - endpoint_host=https://ci.kbase.us
+ - endpoint_base=/services
+ - method_spec_default_tag=dev
+ - auth_service_url=https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login
+ - auth_service_url_allow_insecure=false
+ - service_port=7125
+ command:
+ - "-wait"
+ - "tcp://mongo:27017"
+ - "-timeout"
+ - "120s"
+ - "-template"
+ - "/kb/deployment/conf/.templates/deployment.cfg.templ:/kb/deployment/conf/deployment.cfg"
+ - "-template"
+ - "/kb/deployment/conf/.templates/http.ini.templ:/kb/deployment/services/narrative_method_store/start.d/http.ini"
+ - "-template"
+ - "/kb/deployment/conf/.templates/server.ini.templ:/kb/deployment/services/narrative_method_store/start.d/server.ini"
+ - "-template"
+ - "/kb/deployment/conf/.templates/start_server.sh.templ:/kb/deployment/bin/start_server.sh"
+ - "-stdout"
+ - "/kb/deployment/services/narrative_method_store/logs/request.log"
+ - "/kb/deployment/bin/start_server.sh"
+
+ mongo:
+ image: "mongo:7.0.4"
+ ports:
+ - "27018:27017"
\ No newline at end of file
diff --git a/test/local_function_module_test.py b/test/local_function_module_test.py
index bf510e51..0b173039 100644
--- a/test/local_function_module_test.py
+++ b/test/local_function_module_test.py
@@ -16,7 +16,7 @@ def test_local_function_module(self):
# assume test user is already approved as a developer
# (1) register the test repo
giturl = self.cUtil.get_test_repo_1()
- githash = 'a01e1a20b9c504a0136c75323b00b1cd4c7f7970' # branch local_method_module
+ githash = 'a8915afe6811de9199897d710348befad8f6f7ab' # branch local_method_module
registration_id = self.catalog.register_repo(self.cUtil.user_ctx(),
{'git_url': giturl,
'git_commit_hash': githash})[0]
@@ -59,7 +59,7 @@ def test_local_function_module(self):
self.assertEqual(len(specs), 1)
info = specs[0]['info']
self.assertEqual(info['function_id'], 'powerpoint_to_genome')
- self.assertEqual(info['git_commit_hash'], 'a01e1a20b9c504a0136c75323b00b1cd4c7f7970')
+ self.assertEqual(info['git_commit_hash'], 'a8915afe6811de9199897d710348befad8f6f7ab')
self.assertEqual(info['module_name'], 'GenomeToPowerpointConverter')
self.assertEqual(info['name'], 'Powerpoint to Genome')
self.assertEqual(info['release_tag'], ['dev'])
@@ -122,7 +122,7 @@ def test_local_function_module(self):
self.assertEqual(len(specs), 1)
info = specs[0]['info']
self.assertEqual(info['function_id'], 'powerpoint_to_genome')
- self.assertEqual(info['git_commit_hash'], 'a01e1a20b9c504a0136c75323b00b1cd4c7f7970')
+ self.assertEqual(info['git_commit_hash'], 'a8915afe6811de9199897d710348befad8f6f7ab')
self.assertEqual(info['module_name'], 'GenomeToPowerpointConverter')
self.assertEqual(info['name'], 'Powerpoint to Genome')
self.assertEqual(info['release_tag'], ['beta', 'dev'])
@@ -179,12 +179,12 @@ def test_local_function_module(self):
# make sure we can fetch it by commit hash
specs = self.catalog.get_local_function_details(self.cUtil.user_ctx(), {'functions': [
{'module_name': 'GenomeTopowerpointConverter', 'function_id': 'powerpoint_to_genome',
- 'git_commit_hash': 'a01e1a20b9c504a0136c75323b00b1cd4c7f7970'}]})[0]
+ 'git_commit_hash': 'a8915afe6811de9199897d710348befad8f6f7ab'}]})[0]
self.assertEqual(len(specs), 1)
info = specs[0]['info']
self.assertEqual(info['function_id'], 'powerpoint_to_genome')
- self.assertEqual(info['git_commit_hash'], 'a01e1a20b9c504a0136c75323b00b1cd4c7f7970')
+ self.assertEqual(info['git_commit_hash'], 'a8915afe6811de9199897d710348befad8f6f7ab')
self.assertEqual(info['module_name'], 'GenomeToPowerpointConverter')
self.assertEqual(info['name'], 'Powerpoint to Genome')
self.assertEqual(info['release_tag'], ['release', 'beta', 'dev'])
diff --git a/test/mock_auth/server.py b/test/mock_auth/server.py
new file mode 100644
index 00000000..9a212a1d
--- /dev/null
+++ b/test/mock_auth/server.py
@@ -0,0 +1,147 @@
+# We are replacing this file in the mock_auth server container so that the Mock server serves routes
+# in a specific order, which will allow the test to pass. The original code uses os.listdir(),
+# which does not guarantee preserving the order of the JSON files in the config directory.
+
+import os
+import sys
+import json
+import jsonschema
+import traceback
+import flask
+from jsonschema.exceptions import ValidationError
+
+# Load the endpoints data, the schema, and validate the structure
+
+# For validating every config file
+with open('endpoint_schema.json') as fd:
+ endpoint_schema = json.load(fd)
+
+if not os.path.exists('/config'):
+ sys.stderr.write('Path not found: /config\n')
+ sys.exit(1)
+
+endpoints = []
+# Mock server serves routes in a specific order, and this is the current order required for the tests to pass.
+# Changing the order may cause the tests to fail, so it's important to maintain this sequence.
+catalog_mock_auth = ['auth_admin.json', 'auth_invalid.json', 'auth_missing.json', 'auth_non_admin.json']
+for path in catalog_mock_auth:
+ if path.endswith('.json'):
+ full_path = '/config/' + path
+ with open(full_path) as fd:
+ try:
+ endpoint = json.load(fd)
+ except ValueError as err:
+ sys.stderr.write(f'JSON parsing error:\n{err}')
+ sys.exit(1)
+ try:
+ jsonschema.validate(endpoint, endpoint_schema)
+ except ValidationError as err:
+ sys.stderr.write(f'JSON Schema validation Error for {path}:\n')
+ sys.stderr.write(str(err) + '\n')
+ sys.exit(1)
+ endpoints.append(endpoint)
+
+print(f'Loaded {len(endpoints)} mock endpoints')
+
+# Start the Flask app
+app = flask.Flask(__name__)
+methods = ['GET', 'POST', 'PUT', 'DELETE']
+
+
+@app.route('/', defaults={'path': ''}, methods=methods)
+@app.route('/<path:path>', methods=methods)
+def handle_request(path):
+ """
+ Catch-all: handle any request against the endpoints.json data.
+ """
+ print('-' * 80)
+ path = '/' + path
+ req_body = flask.request.get_data().decode() or ''
+ method = flask.request.method
+ # Find the first endpoint that matches path, method, headers, and body
+ for endpoint in endpoints:
+ if endpoint['path'] == path:
+ print('Matched path:', path)
+ else:
+ continue
+ expected_methods = endpoint.get('methods', ['GET'])
+ if method in expected_methods:
+ print('Matched method')
+ else:
+ msg = f'Mismatch on method: {method} vs {expected_methods}'
+ print(msg)
+ continue
+ if match_headers(endpoint):
+ print('Matched headers')
+ else:
+ hs = dict(flask.request.headers)
+ expected_hs = endpoint.get('headers')
+ msg = f'Mismatch on headers:\n got: {hs}\n expected: {expected_hs}'
+ print(msg)
+ continue
+ expected_body = endpoint.get('body', '')
+ if isinstance(expected_body, dict):
+ expected_body_json = json.dumps(expected_body)
+ try:
+ given_body_json = json.dumps(json.loads(req_body))
+ except Exception as err:
+ print('Error parsing json body:', str(err))
+ continue
+ body_ok = expected_body_json == given_body_json
+ else:
+ body_ok = expected_body.strip() == req_body.strip()
+ if body_ok:
+ print('Matched body')
+ else:
+ msg = f'Mismatch on body:\n got: {req_body}\n expected: {expected_body}'
+ print(msg)
+ continue
+ print('Matched endpoint {} {}'.format(method, path))
+ return mock_response(endpoint.get('response', {}))
+ raise Exception('Unable to match endpoint: %s %s' % (method, path))
+
+
+@app.errorhandler(Exception)
+def any_exception(err):
+ """Catch any error with a JSON response."""
+ class_name = err.__class__.__name__
+ print(traceback.format_exc())
+ resp = {'error': str(err), 'class': class_name}
+ return (flask.jsonify(resp), 500)
+
+
+def match_headers(endpoint):
+ """
+ Either check that there are no headers to match, or match that all headers
+ in the endpoint are present and equal in the request.
+ """
+ if 'headers' not in endpoint and 'absent_headers' not in endpoint:
+ return True
+ headers = dict(flask.request.headers)
+ if 'headers' in endpoint:
+ for (key, val) in endpoint['headers'].items():
+ if val != headers.get(key):
+ return False
+ # Enforce that certain headers must be absent
+ if 'absent_headers' in endpoint:
+ header_keys = set(key.lower() for key in headers.keys())
+ print('headers are', headers)
+ for key in endpoint['absent_headers']:
+ print('checking absent', key)
+ if key.lower() in header_keys:
+ return False
+ return True
+
+
+def mock_response(config):
+ """
+ Create a mock flask response from the endpoints.json configuration
+ """
+ resp_body = config.get('body')
+ if isinstance(resp_body, dict):
+ resp_body = json.dumps(resp_body)
+ resp = flask.Response(resp_body)
+ resp.status = config.get('status', '200')
+ for (header, val) in config.get('headers', {}).items():
+ resp.headers[header] = val
+ return resp
diff --git a/test/run_tests.sh b/test/run_tests.sh
index e4d84c58..0b9e03ff 100755
--- a/test/run_tests.sh
+++ b/test/run_tests.sh
@@ -11,16 +11,15 @@
# start the test NMS endpoint
echo 'Starting NMS...'
export KB_DEPLOYMENT_CONFIG=test.cfg
-classpath=`cat ../narrative_method_store/dist/jar.classpath.txt`
-java -cp $classpath us.kbase.narrativemethodstore.NarrativeMethodStoreServer 7125 > nms/error.log 2>&1 &
+docker compose -f docker-compose_nms.yml up -d
NMS_PID=$!
echo 'Starting Mock Auth API...'
-docker run -d --rm -v ${PWD}/mock_auth:/config -p 7777:5000 --name mock-auth mockservices/mock_json_service
+docker run -d --rm -v ${PWD}/mock_auth:/config -v ${PWD}/mock_auth/server.py:/server/server.py -p 7777:5000 --name mock-auth mockservices/mock_json_service
echo 'Waiting for NMS to start...'
sleep 25
-curl -d '{"id":"1","params":[],"method":"NarrativeMethodStore.ver","version":"1.1"}' http://localhost:7125
+curl -d '{"id":"1","params":[],"method":"NarrativeMethodStore.ver","version":"1.1"}' http://localhost:7125/rpc
if [ $? -ne 0 ]; then
kill -9 $NMS_PID
echo 'NMS did not startup in time. Fail.'
@@ -57,7 +56,7 @@ echo "unit tests returned with error code=${TEST_RETURN_CODE}"
#### SHUTDOWN stuff and exit
# stop NMS
-kill -9 $NMS_PID
+docker compose -f docker-compose_nms.yml down
#stop Docker containers
docker stop mock-auth
diff --git a/test/test.cfg.example b/test/test.cfg.example
index 75e3f505..e2f3a32f 100644
--- a/test/test.cfg.example
+++ b/test/test.cfg.example
@@ -3,11 +3,8 @@
# TO RUN TESTS:
# 1) copy this file to test.cfg
-# 2) fill in the mongodb-host url in both [catalog-test] and [NarrativeMethodStore]
-# 3) set the nms-admin-user and nms-admin-password or nms-admin-token to a
-# kb user that matches the method-spec-admin-users list in
-# the [NarrativeMethodStore section]
-#
+# 2) set nms-admin-token to a token for a kb user that matches the
+#    method-spec-admin-users list in the docker-compose_nms.yml file
[catalog-test]
@@ -38,28 +35,9 @@ mongodb-database = catalog-test
docker-base-url = unix://var/run/docker.sock
# Narrative Method Store configuration. Please provide a token.
-# If both are provided, the token is used.
-nms-url = http://localhost:7125
+nms-url = http://localhost:7125/rpc
nms-admin-token =
# configs for reference data
ref-data-base = /kb/data
kbase-endpoint = https://ci.kbase.us/services
-
-
-[NarrativeMethodStore]
-
-method-spec-mongo-host = localhost:27017
-method-spec-admin-users = wstester4
-
-method-spec-git-repo-local-dir = nms/local_narrative_method_specs
-method-spec-temp-dir = nms/scratch
-
-method-spec-git-repo = https://github.com/kbase/narrative_method_specs
-method-spec-git-repo-branch = develop
-method-spec-git-repo-refresh-rate = 2000
-method-spec-cache-size = 5000
-method-spec-mongo-dbname = method_store_repo_db
-
-# The KBase auth server url.
-auth-service-url=https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login