diff --git a/app/controllers/ApiController.scala b/app/controllers/ApiController.scala index 4a6085ab..5d5effbb 100644 --- a/app/controllers/ApiController.scala +++ b/app/controllers/ApiController.scala @@ -1,29 +1,32 @@ +//CJM 10 package controllers -import java.io.{OutputStream, PipedInputStream, PipedOutputStream} import akka.stream.scaladsl.{Source, StreamConverters} import akka.util.ByteString - -import javax.inject.Inject -import play.api.Logging -import play.api.mvc._ -import play.api.libs.json._ -import play.api.libs.json.Reads._ - -import java.nio.file.Paths -import java.time.format.DateTimeFormatter -import java.time.LocalDateTime -import scala.concurrent.{ExecutionContext, Future} import controllers.auth.{AuthActionFactory, UserRequest} import models.FeatureToggleModel.FeatureToggleService import models._ import models.config.SmuiVersion -import models.input.{InputTagId, InputValidator, ListItem, SearchInputId, SearchInputWithRules} +import models.input._ import models.querqy.QuerqyRulesTxtGenerator import models.spellings.{CanonicalSpellingId, CanonicalSpellingValidator, CanonicalSpellingWithAlternatives} +import models.validatedimport.ValidatedImportData import org.checkerframework.checker.units.qual.A +import play.api.Logging +import play.api.libs.Files +import play.api.libs.Files.TemporaryFile.temporaryFileToPath +import play.api.libs.json.Reads._ +import play.api.libs.json._ +import play.api.mvc._ import services.{RulesTxtDeploymentService, RulesTxtImportService} +import java.io.{OutputStream, PipedInputStream, PipedOutputStream} +import java.nio.file.Paths +import java.time.LocalDateTime +import java.time.format.DateTimeFormatter +import javax.inject.Inject +import scala.concurrent.{ExecutionContext, Future} + // TODO Make ApiController pure REST- / JSON-Controller to ensure all implicit Framework responses (e.g. 
400, 500) conformity class ApiController @Inject()(authActionFactory: AuthActionFactory, @@ -58,11 +61,20 @@ class ApiController @Inject()(authActionFactory: AuthActionFactory, jsonBody.map { json => val searchIndexName = (json \ "name").as[String] val searchIndexDescription = (json \ "description").as[String] - val solrIndexId = searchManagementRepository.addNewSolrIndex( - SolrIndex(name = searchIndexName, description = searchIndexDescription) - ) - Ok(Json.toJson(ApiResult(API_RESULT_OK, "Successfully added Deployment Channel '" + searchIndexName + "'.", Some(solrIndexId)))) + try { + var solrIndexId = searchManagementRepository.addNewSolrIndex( + SolrIndex(name = searchIndexName, description = searchIndexDescription) + ); + logger.debug("solrIndexId:" + solrIndexId); + Ok(Json.toJson(ApiResult(API_RESULT_OK, "Successfully added Deployment Channel '" + searchIndexName + "'.", Some(solrIndexId)))) + } catch { + case e: Exception => { + logger.debug("The searchIndexDescription (Search Engine Collection Name) given was likely a duplicate."); + BadRequest(Json.toJson(ApiResult(API_RESULT_FAIL, "Could not add Rules Collection. Only one Rules Collection per Search Engine Collection is allowed.", None))) + }; + } + }.getOrElse { BadRequest(Json.toJson(ApiResult(API_RESULT_FAIL, "Adding new Deployment Channel failed. 
Unexpected body data.", None))) } @@ -95,6 +107,7 @@ class ApiController @Inject()(authActionFactory: AuthActionFactory, } } + //CJM 8 def downloadAllRulesTxtFiles = authActionFactory.getAuthenticatedAction(Action) { req => Ok.chunked( createStreamResultInBackground( @@ -126,6 +139,7 @@ class ApiController @Inject()(authActionFactory: AuthActionFactory, def addNewSearchInput(solrIndexId: String) = authActionFactory.getAuthenticatedAction(Action).async { request: Request[AnyContent] => Future { + logger.debug("addNewSearchInput") val userInfo: Option[String] = lookupUserInfo(request) val body: AnyContent = request.body @@ -156,16 +170,26 @@ class ApiController @Inject()(authActionFactory: AuthActionFactory, def updateSearchInput(searchInputId: String) = authActionFactory.getAuthenticatedAction(Action) { request: Request[AnyContent] => - val body: AnyContent = request.body + logger.debug("updateSearchInput:1") + var body: AnyContent = request.body + //var jsonString: String = "{\"id\":\"ab116147-498a-427e-9481-9565739aa706\",\"term\":\"synonym1\",\"synonymRules\":[{\"id\":\"e7ab9b28-e6af-48dc-af7f-224b00381e62\",\"synonymType\":1,\"term\":\"synonym2\",\"isActive\":true,\"status\":1,\"lastUpdate\":\"2005\"}],\"upDownRules\":[],\"filterRules\":[],\"deleteRules\":[],\"redirectRules\":[],\"tags\":[],\"isActive\":true,\"comment\":\"synonym3\"}" + //val jsVal: JsValue = Json.parse(jsonString) + //body = AnyContentAsJson(jsVal) val jsonBody: Option[JsValue] = body.asJson val userInfo: Option[String] = lookupUserInfo(request) - + logger.debug("updateSearchInput:2") + logger.debug("updateSearchInput:body") + logger.debug(body.toString) + logger.debug("updateSearchInput:jsonBody") + logger.debug(jsonBody.toString) // Expecting json body jsonBody.map { json => + logger.debug("updateSearchInput:3") val searchInput = json.as[SearchInputWithRules] - + logger.debug("updateSearchInput:4") InputValidator.validateInputTerm(searchInput.term) match { case Nil => { + 
logger.debug("updateSearchInput:5") // proceed updating input with rules querqyRulesTxtGenerator.validateSearchInputToErrMsg(searchInput) match { case Some(strErrMsg: String) => @@ -180,6 +204,7 @@ class ApiController @Inject()(authActionFactory: AuthActionFactory, } } case errors => { + logger.debug("updateSearchInput:6") val msgs = s"Failed to update Search Input with new term ${searchInput.term}: " + errors.mkString("\n") logger.error(msgs) BadRequest(Json.toJson(ApiResult(API_RESULT_FAIL, msgs, None))) @@ -187,6 +212,7 @@ class ApiController @Inject()(authActionFactory: AuthActionFactory, } }.getOrElse { + logger.debug("updateSearchInput:7") BadRequest(Json.toJson(ApiResult(API_RESULT_FAIL, "Adding new Search Input failed. Unexpected body data.", None))) } } @@ -205,6 +231,19 @@ class ApiController @Inject()(authActionFactory: AuthActionFactory, Ok(Json.toJson(ListItem.create(searchInputs, spellings))) } +// def listAll2(solrIndexId: String) = authActionFactory.getAuthenticatedAction(Action) { +// //val searchInputs = searchManagementRepository.listAllSearchInputsInclDirectedSynonyms(SolrIndexId(solrIndexId)) +// //val searchInputs = searchManagementRepository.listAllSearchInputsInclDirectedSynonyms(SolrIndexId(solrIndexId)) +// //val spellings = searchManagementRepository.listAllSpellingsWithAlternatives(SolrIndexId(solrIndexId)) +// Future { +// this.getSolrIndex(solrIndexId) +// Ok(Json.toJson(ApiResult(API_RESULT_OK, "Solr Index successful", None))) +// +// } +// //Ok(s); +// //Ok(Json.toJson(x)) +// } + def addNewSpelling(solrIndexId: String) = authActionFactory.getAuthenticatedAction(Action).async { request: Request[AnyContent] => Future { val userInfo: Option[String] = lookupUserInfo(request) @@ -574,4 +613,52 @@ class ApiController @Inject()(authActionFactory: AuthActionFactory, } } + def getDatabaseJsonWithId(id: String): Action[AnyContent] = authActionFactory.getAuthenticatedAction(Action).async { + Future { + logger.debug("In 
ApiController:getDatabaseJsonWithId and got id: " + id) + Ok(Json.toJson(searchManagementRepository.getDatabaseJsonWithId(id))) + } + } + + def uploadImport: Action[MultipartFormData[Files.TemporaryFile]] = authActionFactory.getAuthenticatedAction(Action).async(parse.multipartFormData) { implicit request => + Future { + val tryDatabaseStuff: Boolean = true + logger.debug("In ApiController:uploadImport") + if (request.body.files.size == 1) { + + val fileName: String = request.body.files.head.filename + logger.debug(fileName) + import java.nio.file.Files + val content = Files.readString(temporaryFileToPath(request.body.files.head.ref)) + logger.debug(content) + val validatedImport: ValidatedImportData = new ValidatedImportData(fileName, content) + if (tryDatabaseStuff) { + searchManagementRepository.doImport(validatedImport) + } + Ok(Json.toJson(ApiResult(API_RESULT_OK, "Got file.", None))) + } else { + BadRequest("Only one upload file is allowed. Input must be valid") + } + } + } + +// def putty: Action[AnyContent] = authActionFactory.getAuthenticatedAction(Action).async { +// Future { +// logger.debug("ApiController.putty():1") +// searchManagementRepository.putty +// logger.debug("ApiController.putty():2") +// Ok(Json.toJson(ApiResult(API_RESULT_OK, "That worked.", None))) +// } +// } + + def putty: Action[AnyContent] = authActionFactory.getAuthenticatedAction(Action).async { + Future { + logger.debug("In ApiController:putty") + val content = "[{\"tableName\":\"solr_index\",\"columns\":[\"id\",\"name\",\"description\",\"last_update\"],\"rows\":[[\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"test\",\"2022-05-31T11:14:38\"]]},{\"tableName\":\"search_input\",\"columns\":[\"id\",\"term\",\"solr_index_id\",\"last_update\",\"status\",\"comment\"],\"rows\":[[\"16c30efd-3139-4916-bfb6-57463af18250\",\"test\",\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"2022-05-31T17:25:25\",1,\"updown 
comment\"],[\"5418428c-0d4c-4464-a2a6-084f264be360\",\"s\",\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"2022-05-31T15:29:59\",1,\"syn com\"],[\"70823642-e7c6-4857-9d6c-a54b3c382f0d\",\"test\",\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"2022-05-31T14:43:54\",1,\"\"],[\"89c10061-26d9-4b5f-9e99-92696cc5da74\",\"test two three\",\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"2022-05-31T13:43:50\",1,\"a comment\"],[\"9fb7f8b4-5544-4df0-9d08-d485a0145dbe\",\"redirect\",\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"2022-05-31T17:20:36\",1,\"redirect comment\"],[\"ccc48739-f192-44b1-b552-995eed4a0a51\",\"all\",\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"2022-05-31T14:41:55\",1,\"\"],[\"dd1bd496-90ed-43ad-9895-e67a4f67adeb\",\"test1\",\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"2022-05-31T13:54:27\",1,\"\"],[\"e8064dd4-0e76-4e0b-963a-06ea8cae65e2\",\"t345\",\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"2022-05-31T14:56:51\",1,\"c\"]]},{\"tableName\":\"redirect_rule\",\"columns\":[\"id\",\"target\",\"search_input_id\",\"last_update\",\"status\"],\"rows\":[[\"89e5833a-64b4-4a97-924b-a18b66694437\",\"https://www.google.com\",\"9fb7f8b4-5544-4df0-9d08-d485a0145dbe\",\"2022-05-31T17:20:36\",1]]},{\"tableName\":\"synonym_rule\",\"columns\":[\"id\",\"synonymType\",\"term\",\"search_input_id\",\"last_update\",\"status\"],\"rows\":[[\"70d3eb55-ad67-4890-a157-130ea72637c1\",0,\"y\",\"5418428c-0d4c-4464-a2a6-084f264be360\",\"2022-05-31T15:29:59\",1]]},{\"tableName\":\"up_down_rule\",\"columns\":[\"id\",\"up_down_type\",\"boost_malus_type\",\"term\",\"search_input_id\",\"last_update\",\"status\"],\"rows\":[[\"a26f49ad-28ba-40e3-a968-ada168d948c7\",0,5,\"* 
a:test\",1,\"16c30efd-3139-4916-bfb6-57463af18250\",\"2022-05-31T17:25:25\"]]},{\"tableName\":\"delete_rule\",\"columns\":[\"id\",\"term\",\"search_input_id\",\"last_update\",\"status\"],\"rows\":[[\"36d7a7d2-4133-4bcc-b4b3-19ec6d0404d1\",\"two\",\"89c10061-26d9-4b5f-9e99-92696cc5da74\",\"2022-05-31T13:43:50\",1]]},{\"tableName\":\"filter_rule\",\"columns\":[\"id\",\"term\",\"search_input_id\",\"last_update\",\"status\"],\"rows\":[[\"8ed6c4bd-ac69-4a94-898c-fabb13a7fc47\",\"* test:* a:b\",\"e8064dd4-0e76-4e0b-963a-06ea8cae65e2\",\"2022-05-31T14:56:51\",1]]},{\"tableName\":\"suggested_solr_field\",\"columns\":[\"id\",\"name\",\"solr_index_id\",\"last_update\"],\"rows\":[[\"4ce83b3a-7263-4873-b4f2-a66a9321fdbb\",\"test\",\"b0eecea6-efa7-4575-9bb4-acba1aab146b\",\"2022-05-31T17:37:43\"]]},{\"tableName\":\"input_tag\",\"columns\":[\"id\",\"solr_index_id\",\"property\",\"tag_value\",\"exported\",\"predefined\",\"last_update\"],\"rows\":[[\"wh\",\"some solr_index_id\",\"some property\",\"some tag_value\",2345,123,\"2022-05-31T18:23:47\"]]},{\"tableName\":\"tag_2_input\",\"columns\":[\"id\",\"searchInputId\",\"last_update\"],\"rows\":[[\"hi\",\"3\",\"2022-05-31T18:22:14\"]]},{\"tableName\":\"canonical_spelling\",\"columns\":[\"id\",\"solr_index_id\",\"term\",\"status\",\"comment\",\"last_update\"],\"rows\":[[\"id8\",\"id9\",\"a_term_can_spell\",0,\"can_spell_comment\",\"2022-05-31T18:44:15\"]]},{\"tableName\":\"alternative_spelling\",\"columns\":[\"id\",\"canonical_spelling_id\",\"term\",\"status\",\"last_update\"],\"rows\":[[\"id10\",\"id11\",\"alt_spell_term\",0,\"2022-05-31T18:44:15\"]]}]" + val validatedImport: ValidatedImportData = new ValidatedImportData("unknown.txt", content) + searchManagementRepository.doImport(validatedImport) + Ok(Json.toJson(ApiResult(API_RESULT_OK, "OK.", None))) + } + } + } diff --git a/app/models/SearchManagementRepository.scala b/app/models/SearchManagementRepository.scala index 92b674cc..c3fb1b19 100644 --- 
a/app/models/SearchManagementRepository.scala +++ b/app/models/SearchManagementRepository.scala @@ -1,16 +1,20 @@ package models -import java.time.LocalDateTime -import java.util.{Date, UUID} -import javax.inject.Inject -import play.api.db.DBApi import anorm._ import models.FeatureToggleModel.FeatureToggleService -import models.input.{InputTag, InputTagId, PredefinedTag, SearchInput, SearchInputId, SearchInputWithRules, TagInputAssociation} -import models.spellings.{CanonicalSpelling, CanonicalSpellingId, CanonicalSpellingWithAlternatives} import models.eventhistory.{ActivityLog, ActivityLogEntry, InputEvent} +import models.export.Exporter +import models.input._ import models.reports.{ActivityReport, DeploymentLog, RulesReport} +import models.spellings.{CanonicalSpelling, CanonicalSpellingId, CanonicalSpellingWithAlternatives} +import models.validatedimport.{ValidatedImportData, ValidatedImportImporter} import play.api.Logging +import play.api.db.DBApi +import play.api.libs.json.JsValue + +import java.time.LocalDateTime +import java.util.{Date, UUID} +import javax.inject.Inject // TODO Make `userInfo` mandatory (for all input/spelling and deploymentLog CRUD operations), when removing unauthorized access. 
@javax.inject.Singleton @@ -304,4 +308,24 @@ class SearchManagementRepository @Inject()(dbapi: DBApi, toggleService: FeatureT } } + def getDatabaseJsonWithId(id: String): JsValue = db.withConnection { + implicit connection => { + logger.debug("In SearchManagementRepository:getDatabaseJsonWithId():1") + val exporter : Exporter = new Exporter(dbapi, toggleService) + exporter.getDatabaseJsonWithId(id) + } + } + + def doImport(validatedImport: ValidatedImportData): String = db.withTransaction { implicit connection => + var aString : String = "At SearchManagementRepository:doImport():1" + logger.debug(aString) + + val importer = new ValidatedImportImporter(validatedImport, dbapi, toggleService) + importer.performImport() + + aString = "At SearchManagementRepository:doImport():2" + logger.debug(aString) + aString + } + } diff --git a/app/models/SolrIndex.scala b/app/models/SolrIndex.scala index c8b98979..6a9df97c 100644 --- a/app/models/SolrIndex.scala +++ b/app/models/SolrIndex.scala @@ -52,6 +52,12 @@ object SolrIndex { allMatchingIndeces.head } +// def loadById(solrIndexId: String)(implicit connection: Connection): SolrIndex = { +// val allMatchingIndeces = SQL"select * from #$TABLE_NAME where id = $solrIndexId".as(sqlParser.*) +// +// allMatchingIndeces.head +// } + def insert(newSolrIndex: SolrIndex)(implicit connection: Connection): SolrIndexId = { SQL"insert into #$TABLE_NAME (id, name, description, last_update) values (${newSolrIndex.id}, ${newSolrIndex.name}, ${newSolrIndex.description}, ${new Date()})".execute() newSolrIndex.id diff --git a/app/models/export/AlternativeSpellingExport.scala b/app/models/export/AlternativeSpellingExport.scala new file mode 100644 index 00000000..da68cf10 --- /dev/null +++ b/app/models/export/AlternativeSpellingExport.scala @@ -0,0 +1,79 @@ +package models.`export` + +import anorm.SqlParser.get +import anorm.{RowParser, ~} +import models.`export`.JsonExportable +import models.input.SearchInputId +import 
models.rules.{CommonRuleFields, DeleteRuleId} +import models.spellings.{AlternativeSpellingId, CanonicalSpellingId} +import models.{SolrIndexId, Status, `export`} +import play.api.libs.json._ + +import java.time.{LocalDateTime, ZoneOffset} + +case class AlternativeSpellingExport(id: AlternativeSpellingId = AlternativeSpellingId(), + canonicalSpellingId: CanonicalSpellingId, + term: String, + status: Int, + lastUpdate: LocalDateTime) extends JsonExportable { + + def getTableName: JsString = JsString("alternative_spelling") + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("canonical_spelling_id"), + JsString("term"), + JsString("status"), + JsString("last_update") + ) + ) + } + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(canonicalSpellingId.toString), + JsString(term), + JsNumber(status), + JsString(lastUpdate.toString) + ) + ) + } + +} + +object AlternativeSpellingExport { + + val TABLE_NAME = "alternative_spelling" + val ID = "id" + val CANONICAL_SPELLING_ID = "canonical_spelling_id" + val TERM = "term" + val STATUS = "status" + val LAST_UPDATE = "last_update" + + val sqlParser: RowParser[AlternativeSpellingExport] = { + get[AlternativeSpellingId](s"$TABLE_NAME.$ID") ~ + get[CanonicalSpellingId](s"$TABLE_NAME.$CANONICAL_SPELLING_ID") ~ + get[String](s"$TABLE_NAME.$TERM") ~ + get[Int](s"$TABLE_NAME.$STATUS") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") map { + case id ~ canonicalSpellingId ~ term ~ status ~ lastUpdate => + AlternativeSpellingExport(id, canonicalSpellingId, term, status, lastUpdate) + } + } + + val selectAllStatement : String = { + s"select $TABLE_NAME.$ID, " + + s"$TABLE_NAME.$CANONICAL_SPELLING_ID, " + + s"$TABLE_NAME.$TERM, " + + s"$TABLE_NAME.$STATUS, " + + s"$TABLE_NAME.$LAST_UPDATE from $TABLE_NAME" + } + + def selectStatement(id: String) : String = { + this.selectAllStatement + s" where canonical_spelling_id in (select id from canonical_spelling where 
solr_index_id = '" + id + "')" + } +} \ No newline at end of file diff --git a/app/models/export/CanonicalSpellingExport.scala b/app/models/export/CanonicalSpellingExport.scala new file mode 100644 index 00000000..83ed4c74 --- /dev/null +++ b/app/models/export/CanonicalSpellingExport.scala @@ -0,0 +1,85 @@ +package models.`export` + +import anorm.SqlParser.get +import anorm.{RowParser, ~} +import models.`export`.JsonExportable +import models.input.SearchInputId +import models.rules.{CommonRuleFields, DeleteRuleId} +import models.spellings.CanonicalSpellingId +import models.{SolrIndexId, Status, `export`} +import play.api.libs.json._ + +import java.time.LocalDateTime + +case class CanonicalSpellingExport(id: CanonicalSpellingId = CanonicalSpellingId(), + solrIndexId: SolrIndexId, + term: String, + status: Int, + comment: String, + lastUpdate: LocalDateTime) extends JsonExportable { + + def getTableName: JsString = JsString("canonical_spelling") + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("solr_index_id"), + JsString("term"), + JsString("status"), + JsString("comment"), + JsString("last_update") + ) + ) + } + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(solrIndexId.toString), + JsString(term), + JsNumber(status), + JsString(comment), + JsString(lastUpdate.toString) + ) + ) + } + +} + +object CanonicalSpellingExport { + + val TABLE_NAME = "canonical_spelling" + val ID = "id" + val SOLR_INDEX_ID = "solr_index_id" + val TERM = "term" + val STATUS = "status" + val COMMENT = "comment" + val LAST_UPDATE = "last_update" + + val sqlParser: RowParser[CanonicalSpellingExport] = { + get[CanonicalSpellingId](s"$TABLE_NAME.$ID") ~ + get[SolrIndexId](s"$TABLE_NAME.$SOLR_INDEX_ID") ~ + get[String](s"$TABLE_NAME.$TERM") ~ + get[Int](s"$TABLE_NAME.$STATUS") ~ + get[String](s"$TABLE_NAME.$COMMENT") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") map { + case id ~ solrIndexId ~ term ~ status ~ 
comment ~ lastUpdate => + CanonicalSpellingExport(id, solrIndexId, term, status, comment, lastUpdate) + } + } + + val selectAllStatement : String = { + s"select $TABLE_NAME.$ID, " + + s"$TABLE_NAME.$SOLR_INDEX_ID, " + + s"$TABLE_NAME.$TERM, " + + s"$TABLE_NAME.$STATUS, " + + s"$TABLE_NAME.$COMMENT, " + + s"$TABLE_NAME.$LAST_UPDATE from $TABLE_NAME" + } + + def selectStatement(id: String) : String = { + this.selectAllStatement + " where solr_index_id = '" + id + "'" + } +} \ No newline at end of file diff --git a/app/models/export/DeleteRuleExport.scala b/app/models/export/DeleteRuleExport.scala new file mode 100644 index 00000000..08b2df81 --- /dev/null +++ b/app/models/export/DeleteRuleExport.scala @@ -0,0 +1,70 @@ +package models.`export` + +import anorm.SqlParser.get +import anorm.{RowParser, ~} +import models.`export`.JsonExportable +import models.input.SearchInputId +import models.rules.{CommonRuleFields, DeleteRuleId} +import models.{Status, `export`} +import play.api.libs.json._ + +import java.time.LocalDateTime + +case class DeleteRuleExport(id: DeleteRuleId = DeleteRuleId(), + term: String, + isActive: Boolean, + status: Int, + searchInputId: SearchInputId, + lastUpdate: LocalDateTime) extends JsonExportable { + + def getTableName: JsString = JsString("delete_rule") + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("term"), + JsString("search_input_id"), + JsString("last_update"), + JsString("status") + ) + ) + } + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(term), + JsString(searchInputId.toString), + JsString(lastUpdate.toString), + JsNumber(status) + ) + ) + } + +} + +object DeleteRuleExport extends CommonRuleFields { + + val TABLE_NAME = "delete_rule" + + implicit val jsonFormat: OFormat[DeleteRuleExport] = Json.format[DeleteRuleExport] + + val sqlParser: RowParser[DeleteRuleExport] = { + get[DeleteRuleId](s"$TABLE_NAME.$ID") ~ + get[String](s"$TABLE_NAME.$TERM") ~ + 
get[Int](s"$TABLE_NAME.$STATUS") ~ + get[SearchInputId](s"$TABLE_NAME.$SEARCH_INPUT_ID") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") map { case id ~ term ~ status ~ searchInputId ~ lastUpdate => + `export`.DeleteRuleExport(id, term, Status.isActiveFromStatus(status), status, searchInputId, lastUpdate) + } + } + + val selectAllStatement = s"select id, term, search_input_id, last_update, status from delete_rule" + + def selectStatement(id: String) : String = { + this.selectAllStatement + " where search_input_id in (select id from search_input where solr_index_id = '" + id + "')" + } + +} \ No newline at end of file diff --git a/app/models/export/Exporter.scala b/app/models/export/Exporter.scala new file mode 100644 index 00000000..0092317f --- /dev/null +++ b/app/models/export/Exporter.scala @@ -0,0 +1,145 @@ +package models.`export` + +import anorm.SQL +import models.DatabaseExecutionContext +import models.FeatureToggleModel.FeatureToggleService +import play.api.libs.json.{JsArray, JsObject, JsValue} +import play.api.Logging +import play.api.db.DBApi +import javax.inject.Inject + +@javax.inject.Singleton +class Exporter @Inject()(dbApi: DBApi, + toggleService: FeatureToggleService) + (implicit ec: DatabaseExecutionContext) extends Logging { + + private val db = dbApi.database("default") + + def getAllTablesForJs(tables : IndexedSeq[IndexedSeq[JsonExportable]]): JsValue = { + var aggregation: Seq[JsValue] = Seq[JsValue]() + getAllTablesForJs1(tables, aggregation) + } + + def getAllTablesForJs1(tables : IndexedSeq[IndexedSeq[JsonExportable]], aggregation: Seq[JsValue]): JsValue = { + if (tables.nonEmpty) { + val headTable = tables.head // the first table + val remainingTables = tables.drop(1) // a list of the tables AFTER the first table + if (headTable.nonEmpty) { + logger.debug("In Exporter.getAllTablesForJs1 : head table was non-empty") + val name: (String, JsValue) = "tableName" -> headTable.seq(0).getTableName + logger.debug("table name: "+ name) + val 
cols: (String, JsValue) = "columns" -> headTable.seq(0).getColumns + val rows: (String, JsValue) = "rows" -> asIndexedSeqForJs(headTable) + val obj: (JsValue) = JsObject(IndexedSeq(name, cols, rows)) + getAllTablesForJs1(remainingTables, aggregation :+ obj) + } else { + logger.debug("In Exporter.getAllTablesForJs1 : return aggregation because headTable is empty") + JsArray(aggregation.toIndexedSeq) + } + } else { + logger.debug("In Exporter.getAllTablesForJs1 : return aggregation because tables is empty") + JsArray(aggregation.toIndexedSeq) + } + } + + def asIndexedSeqForJs(indexedSeqSource : IndexedSeq[JsonExportable]): JsValue = { + var target: IndexedSeq[JsValue] = IndexedSeq[JsValue]() + for((element,index) <- indexedSeqSource.view.zipWithIndex) { + logger.debug("In Exporter.asIndexedSeqForJs : String #" + index + " is " + element.getRow) + target = target :+ element.getRow + } + JsArray(target.toIndexedSeq) + } + + def getSearchInputsFromDatabase(id: String): IndexedSeq[SearchInputExport] = db.withConnection { + implicit connection => { + SQL(SearchInputExport.selectStatement(id)).as(SearchInputExport.sqlParser.*).toIndexedSeq + } + } + + def getDeleteRulesFromDatabase(id: String): IndexedSeq[DeleteRuleExport] = db.withConnection { + implicit connection => { + SQL(DeleteRuleExport.selectStatement(id)).as(DeleteRuleExport.sqlParser.*).toIndexedSeq + } + } + + def getFilterRulesFromDatabase(id: String): IndexedSeq[FilterRuleExport] = db.withConnection { + implicit connection => { + SQL(FilterRuleExport.selectStatement(id)).as(FilterRuleExport.sqlParser.*).toIndexedSeq + } + } + + def getSynonymRulesFromDatabase(id: String): IndexedSeq[SynonymRuleExport] = db.withConnection { + implicit connection => { + SQL(SynonymRuleExport.selectStatement(id)).as(SynonymRuleExport.sqlParser.*).toIndexedSeq + } + } + + def getUpDownRulesFromDatabase(id: String): IndexedSeq[UpDownRuleExport] = db.withConnection { + implicit connection => { + 
SQL(UpDownRuleExport.selectStatement(id)).as(UpDownRuleExport.sqlParser.*).toIndexedSeq + } + } + + def getRedirectRulesFromDatabase(id: String): IndexedSeq[RedirectRuleExport] = db.withConnection { + implicit connection => { + SQL(RedirectRuleExport.selectStatement(id)).as(RedirectRuleExport.sqlParser.*).toIndexedSeq + } + } + + def getSolrIndexFromDatabase(id: String): IndexedSeq[SolrIndexExport] = db.withConnection { + implicit connection => { + SQL(SolrIndexExport.selectStatement(id)).as(SolrIndexExport.sqlParser.*).toIndexedSeq + } + } + + def getSuggestedSolrFieldsFromDatabase(id: String): IndexedSeq[SuggestedSolrFieldExport] = db.withConnection { + implicit connection => { + SQL(SuggestedSolrFieldExport.selectStatement(id)).as(SuggestedSolrFieldExport.sqlParser.*).toIndexedSeq + } + } + + def getInputTagsFromDatabase(id: String): IndexedSeq[InputTagExport] = db.withConnection { + implicit connection => { + SQL(InputTagExport.selectStatement(id)).as(InputTagExport.sqlParser.*).toIndexedSeq + } + } + + def getTagInputAssociationsFromDatabase(id: String): IndexedSeq[TagInputAssociationExport] = db.withConnection { + implicit connection => { + SQL(TagInputAssociationExport.selectStatement(id)).as(TagInputAssociationExport.sqlParser.*).toIndexedSeq + } + } + + def getCanonicalSpellingsFromDatabase(id: String): IndexedSeq[CanonicalSpellingExport] = db.withConnection { + implicit connection => { + SQL(CanonicalSpellingExport.selectStatement(id)).as(CanonicalSpellingExport.sqlParser.*).toIndexedSeq + } + } + + def getAlternativeSpellingsFromDatabase(id: String): IndexedSeq[AlternativeSpellingExport] = db.withConnection { + implicit connection => { + SQL(AlternativeSpellingExport.selectStatement(id)).as(AlternativeSpellingExport.sqlParser.*).toIndexedSeq + } + } + + def getDatabaseJsonWithId(id: String): JsValue = { + logger.debug("In Exporter.getDatabaseJsonWithId") + val tableSeq = IndexedSeq( + getSolrIndexFromDatabase(id), + getSearchInputsFromDatabase(id), + 
getRedirectRulesFromDatabase(id), + getSynonymRulesFromDatabase(id), + getUpDownRulesFromDatabase(id), + getDeleteRulesFromDatabase(id), + getFilterRulesFromDatabase(id), + getSuggestedSolrFieldsFromDatabase(id), + getInputTagsFromDatabase(id), + getTagInputAssociationsFromDatabase(id), + getCanonicalSpellingsFromDatabase(id), + getAlternativeSpellingsFromDatabase(id) + ) + getAllTablesForJs(tableSeq) + } + +} diff --git a/app/models/export/FilterRuleExport.scala b/app/models/export/FilterRuleExport.scala new file mode 100644 index 00000000..96cf4a90 --- /dev/null +++ b/app/models/export/FilterRuleExport.scala @@ -0,0 +1,69 @@ +package models.export + +import anorm.SqlParser.get +import anorm.{RowParser, ~} +import models.input.SearchInputId +import models.rules.{CommonRuleFields, FilterRule, FilterRuleId} +import models.{Status, `export`} +import play.api.libs.json._ + +import java.time.LocalDateTime + +case class FilterRuleExport(id: FilterRuleId, + term: String, + isActive: Boolean, + status: Int, + searchInputId: SearchInputId, + lastUpdate: LocalDateTime) extends JsonExportable { + + def getTableName: JsString = JsString("filter_rule") + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("term"), + JsString("search_input_id"), + JsString("last_update"), + JsString("status") + ) + ) + } + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(term), + JsString(searchInputId.toString), + JsString(lastUpdate.toString), + JsNumber(status) + ) + ) + } + +} + +object FilterRuleExport extends CommonRuleFields { + + val TABLE_NAME = "filter_rule" + + implicit val jsonFormat: OFormat[FilterRule] = Json.format[FilterRule] + + val sqlParser: RowParser[FilterRuleExport] = { + get[FilterRuleId](s"$TABLE_NAME.$ID") ~ + get[String](s"$TABLE_NAME.$TERM") ~ + get[Int](s"$TABLE_NAME.$STATUS") ~ + get[SearchInputId](s"$TABLE_NAME.$SEARCH_INPUT_ID") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") map { case id 
~ term ~ status ~ searchInputId ~ lastUpdate => + FilterRuleExport(id, term, Status.isActiveFromStatus(status), status, searchInputId, lastUpdate) + } + } + + val selectAllStatement = s"select $TABLE_NAME.$ID, $TABLE_NAME.$TERM, $TABLE_NAME.$STATUS, $TABLE_NAME.$SEARCH_INPUT_ID, $TABLE_NAME.$LAST_UPDATE from $TABLE_NAME" + + def selectStatement(id: String) : String = { + this.selectAllStatement + " where search_input_id in (select id from search_input where solr_index_id = '" + id + "')" + } + +} \ No newline at end of file diff --git a/app/models/export/InputTagExport.scala b/app/models/export/InputTagExport.scala new file mode 100644 index 00000000..04c1bdb2 --- /dev/null +++ b/app/models/export/InputTagExport.scala @@ -0,0 +1,89 @@ +package models.export + +import anorm.SqlParser.get +import anorm.{RowParser, ~} +import models.SolrIndexId +import models.input.InputTagId +import models.rules.CommonRuleFields +import play.api.libs.json._ + +import java.time.LocalDateTime + +case class InputTagExport(id: InputTagId, + solrIndexId: SolrIndexId, + property: String, + tagValue: String, + exported: Int, + predefined: Int, + lastUpdate: LocalDateTime) extends JsonExportable { + + def getTableName: JsString = JsString("input_tag") + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("solr_index_id"), + JsString("property"), + JsString("tag_value"), + JsString("exported"), + JsString("predefined"), + JsString("last_update") + ) + ) + } + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(solrIndexId.toString), + JsString(property), + JsString(tagValue), + JsNumber(exported), + JsNumber(predefined), + JsString(lastUpdate.toString) + ) + ) + } +} + +object InputTagExport { + + val TABLE_NAME = "input_tag" + val ID = "id" + val SOLR_INDEX_ID = "solr_index_id" + val PROPERTY = "property" + val TAG_VALUE = "tag_value" + val EXPORTED = "exported" + val PREDEFINED = "predefined" + val LAST_UPDATE = 
"last_update" + + val sqlParser: RowParser[InputTagExport] = { + get[InputTagId](s"$TABLE_NAME.$ID") ~ + get[SolrIndexId](s"$TABLE_NAME.$SOLR_INDEX_ID") ~ + get[String](s"$TABLE_NAME.$PROPERTY") ~ + get[String](s"$TABLE_NAME.$TAG_VALUE") ~ + get[Int](s"$TABLE_NAME.$EXPORTED") ~ + get[Int](s"$TABLE_NAME.$PREDEFINED") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") map { + case id ~ solrIndexId ~ property ~ tagValue ~ exported ~ predefined ~ lastUpdate => + InputTagExport(id, solrIndexId, property, tagValue, exported, predefined, lastUpdate) + } + } + + val selectAllStatement: String = { + s"select $TABLE_NAME.$ID, " + + s"$TABLE_NAME.$SOLR_INDEX_ID, " + + s"$TABLE_NAME.$PROPERTY, " + + s"$TABLE_NAME.$TAG_VALUE, " + + s"$TABLE_NAME.$EXPORTED, " + + s"$TABLE_NAME.$PREDEFINED, " + + s"$TABLE_NAME.$LAST_UPDATE from $TABLE_NAME" + } + + def selectStatement(id: String) : String = { + this.selectAllStatement + s" where $TABLE_NAME.$SOLR_INDEX_ID = '" + id + "'" + } + +} \ No newline at end of file diff --git a/app/models/export/JsonExportable.scala b/app/models/export/JsonExportable.scala new file mode 100644 index 00000000..40fc0165 --- /dev/null +++ b/app/models/export/JsonExportable.scala @@ -0,0 +1,13 @@ +package models.`export` + +import play.api.libs.json.{JsString, JsValue} + +trait JsonExportable { + + def getTableName: JsString + + def getRow: JsValue + + def getColumns: JsValue + +} diff --git a/app/models/export/RedirectRuleExport.scala b/app/models/export/RedirectRuleExport.scala new file mode 100644 index 00000000..48c572db --- /dev/null +++ b/app/models/export/RedirectRuleExport.scala @@ -0,0 +1,67 @@ +package models.`export` + +import anorm.SqlParser.get +import anorm.{RowParser, ~} +import models.`export` +import models.`export`.JsonExportable +import models.input.SearchInputId +import models.rules.{CommonRuleFields, RedirectRule, RedirectRuleId} +import play.api.libs.json._ + +import java.time.LocalDateTime + +case class RedirectRuleExport(id: 
RedirectRuleId = RedirectRuleId(), + target: String, + searchInputId: SearchInputId, + lastUpdate: LocalDateTime, + status: Int) extends JsonExportable { + + def getTableName: JsString = JsString("redirect_rule") + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("target"), + JsString("search_input_id"), + JsString("last_update"), + JsString("status") + ) + ) + } + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(target), + JsString(searchInputId.toString), + JsString(lastUpdate.toString), + JsNumber(status) + ) + ) + } +} + +object RedirectRuleExport extends CommonRuleFields { + + val TABLE_NAME = "redirect_rule" + val TARGET = "target" + + val sqlParser: RowParser[RedirectRuleExport] = { + get[RedirectRuleId](s"$TABLE_NAME.$ID") ~ + get[String](s"$TABLE_NAME.$TARGET") ~ + get[SearchInputId](s"$TABLE_NAME.$SEARCH_INPUT_ID") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") ~ + get[Int](s"$TABLE_NAME.$STATUS") map { case id ~ target ~ searchInputId ~ lastUpdate ~ status => + RedirectRuleExport(id, target, searchInputId, lastUpdate, status) + } + } + + val selectAllStatement = s"select $TABLE_NAME.$ID, $TABLE_NAME.$TARGET, $TABLE_NAME.$SEARCH_INPUT_ID, $TABLE_NAME.$LAST_UPDATE, $TABLE_NAME.$STATUS from $TABLE_NAME" + + def selectStatement(id: String) : String = { + this.selectAllStatement + " where search_input_id in (select id from search_input where solr_index_id = '" + id + "')" + } + +} \ No newline at end of file diff --git a/app/models/export/SearchInputExport.scala b/app/models/export/SearchInputExport.scala new file mode 100644 index 00000000..4accfb48 --- /dev/null +++ b/app/models/export/SearchInputExport.scala @@ -0,0 +1,78 @@ +package models.`export` + +import anorm.SqlParser.get +import anorm._ +import models._ +import models.`export`.JsonExportable +import models.input.{SearchInput, SearchInputId} +import play.api.libs.json._ + +import java.time.LocalDateTime + +case class 
SearchInputExport(id: SearchInputId, + solrIndexId: SolrIndexId, + term: String, + lastUpdate: LocalDateTime, + status: Int, + comment: String) extends JsonExportable { + + def getTableName: JsString = JsString("search_input") + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(term), + JsString(solrIndexId.toString), + JsString(lastUpdate.toString), + JsNumber(status), + JsString(comment) + ) + ) + } + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("term"), + JsString("solr_index_id"), + JsString("last_update"), + JsString("status"), + JsString("comment") + ) + ) + } + +} + +object SearchInputExport { + + implicit val jsonFormat: OFormat[SearchInput] = Json.format[SearchInput] + + val TABLE_NAME = "search_input" + val ID = "id" + val TERM = "term" + val SOLR_INDEX_ID = "solr_index_id" + val LAST_UPDATE = "last_update" + val STATUS = "status" + val COMMENT = "comment" + + val sqlParser: RowParser[SearchInputExport] = { + get[SearchInputId](s"$TABLE_NAME.$ID") ~ + get[String](s"$TABLE_NAME.$TERM") ~ + get[SolrIndexId](s"$TABLE_NAME.$SOLR_INDEX_ID") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") ~ + get[Int](s"$TABLE_NAME.$STATUS") ~ + get[String](s"$TABLE_NAME.$COMMENT") map { case id ~ term ~ indexId ~ lastUpdate ~ status ~ comment => + models.`export`.SearchInputExport(id, indexId, term, lastUpdate, status, comment) + } + } + + val selectAllStatement = s"select id, term, solr_index_id, last_update, status, comment from search_input" + + def selectStatement(id: String) : String = { + s"select id, term, solr_index_id, last_update, status, comment from search_input where solr_index_id = '" + id + "'" + } + +} diff --git a/app/models/export/SolrIndexExport.scala b/app/models/export/SolrIndexExport.scala new file mode 100644 index 00000000..a98d0949 --- /dev/null +++ b/app/models/export/SolrIndexExport.scala @@ -0,0 +1,62 @@ +package models.`export` + +import anorm.SqlParser.get +import 
anorm.{RowParser, SQL, ~} +import models.{SolrIndexId, `export`} +import models.rules.CommonRuleFields +import play.api.libs.json._ + +import java.time.LocalDateTime + +case class SolrIndexExport(id: SolrIndexId, + name: String, + description: String, + lastUpdate: LocalDateTime) extends JsonExportable { + + def getTableName: JsString = JsString("solr_index") + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("name"), + JsString("description"), + JsString("last_update") + ) + ) + } + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(name), + JsString(description), + JsString(lastUpdate.toString) + ) + ) + } +} + +object SolrIndexExport extends CommonRuleFields { + + val TABLE_NAME = "solr_index" + val NAME = "name" + val DESCRIPTION = "description" + + val sqlParser: RowParser[SolrIndexExport] = { + get[SolrIndexId](s"$TABLE_NAME.$ID") ~ + get[String](s"$TABLE_NAME.$NAME") ~ + get[String](s"$TABLE_NAME.$DESCRIPTION") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") map { case id ~ name ~ description ~ lastUpdate => + SolrIndexExport(id, name, description, lastUpdate) + } + } + + val selectAllStatement = s"select $TABLE_NAME.$ID, $TABLE_NAME.$NAME, $TABLE_NAME.$DESCRIPTION, $TABLE_NAME.$LAST_UPDATE from $TABLE_NAME" + + def selectStatement(id: String) = { + s"select $TABLE_NAME.$ID, $TABLE_NAME.$NAME, $TABLE_NAME.$DESCRIPTION, $TABLE_NAME.$LAST_UPDATE from $TABLE_NAME where $TABLE_NAME.$ID = '" + id + "'" + } + +} \ No newline at end of file diff --git a/app/models/export/SuggestedSolrFieldExport.scala b/app/models/export/SuggestedSolrFieldExport.scala new file mode 100644 index 00000000..456b75b0 --- /dev/null +++ b/app/models/export/SuggestedSolrFieldExport.scala @@ -0,0 +1,66 @@ +package models.`export` + +import anorm.SqlParser.get +import anorm._ +import models._ +import models.`export`.JsonExportable +import models.input.{SearchInput, SearchInputId} +import play.api.libs.json._ + +import 
java.time.LocalDateTime + +case class SuggestedSolrFieldExport(id: SuggestedSolrFieldId, + name: String, + solrIndexId: SolrIndexId, + lastUpdate: LocalDateTime) extends JsonExportable { + + def getTableName: JsString = JsString("suggested_solr_field") + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(name), + JsString(solrIndexId.toString), + JsString(lastUpdate.toString) + ) + ) + } + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("name"), + JsString("solr_index_id"), + JsString("last_update") + ) + ) + } + +} + +object SuggestedSolrFieldExport { + + val TABLE_NAME = "suggested_solr_field" + val ID = "id" + val NAME = "name" + val SOLR_INDEX_ID = "solr_index_id" + val LAST_UPDATE = "last_update" + + val sqlParser: RowParser[SuggestedSolrFieldExport] = { + get[SuggestedSolrFieldId](s"$TABLE_NAME.$ID") ~ + get[String](s"$TABLE_NAME.$NAME") ~ + get[SolrIndexId](s"$TABLE_NAME.$SOLR_INDEX_ID") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") map { case id ~ name ~ solrIndexId ~ lastUpdate => + SuggestedSolrFieldExport(id, name, solrIndexId, lastUpdate) + } + } + + val selectAllStatement = s"select $TABLE_NAME.$ID, $TABLE_NAME.$NAME, $TABLE_NAME.$SOLR_INDEX_ID, $TABLE_NAME.$LAST_UPDATE from $TABLE_NAME" + + def selectStatement(id: String) : String = { + this.selectAllStatement + " where solr_index_id = '" + id + "'" + } + +} diff --git a/app/models/export/SynonymRuleExport.scala b/app/models/export/SynonymRuleExport.scala new file mode 100644 index 00000000..871a6966 --- /dev/null +++ b/app/models/export/SynonymRuleExport.scala @@ -0,0 +1,73 @@ +package models.export + +import anorm.SqlParser.get +import anorm._ +import models.input.SearchInputId +import models.rules.{CommonRuleFields, SynonymRule, SynonymRuleId} +import play.api.libs.json._ + +import java.time.LocalDateTime + +case class SynonymRuleExport(id: SynonymRuleId, + synonymType: Int, + term: String, + status: Int, + 
searchInputId: SearchInputId, + lastUpdate: LocalDateTime) extends JsonExportable { + + def getTableName: JsString = JsString("synonym_rule") + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("synonym_type"), + JsString("term"), + JsString("search_input_id"), + JsString("last_update"), + JsString("status") + ) + ) + } + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsNumber(synonymType), + JsString(term), + JsString(searchInputId.toString), + JsString(lastUpdate.toString), + JsNumber(status) + ) + ) + } +} + +object SynonymRuleExport extends CommonRuleFields { + + val TABLE_NAME = "synonym_rule" + val TYPE = "synonym_type" + + val TYPE_UNDIRECTED = 0 + val TYPE_DIRECTED = 1 + + implicit val jsonFormat: OFormat[SynonymRule] = Json.format[SynonymRule] + + val sqlParser: RowParser[SynonymRuleExport] = { + get[SynonymRuleId](s"$TABLE_NAME.$ID") ~ + get[Int](s"$TABLE_NAME.$TYPE") ~ + get[String](s"$TABLE_NAME.$TERM") ~ + get[Int](s"$TABLE_NAME.$STATUS") ~ + get[SearchInputId](s"$TABLE_NAME.$SEARCH_INPUT_ID") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE") map { case id ~ synonymType ~ term ~ status ~ searchInputId ~ lastUpdate => + SynonymRuleExport(id, synonymType, term, status, searchInputId, lastUpdate) + } + } + + val selectAllStatement = s"select $TABLE_NAME.$ID, $TABLE_NAME.$TYPE, $TABLE_NAME.$TERM, $TABLE_NAME.$STATUS, $TABLE_NAME.$SEARCH_INPUT_ID, $TABLE_NAME.$LAST_UPDATE from $TABLE_NAME" + + def selectStatement(id: String) : String = { + this.selectAllStatement + " where search_input_id in " + s"(select id from search_input where solr_index_id = '" + id + "')" + } +} \ No newline at end of file diff --git a/app/models/export/TagInputAssociationExport.scala b/app/models/export/TagInputAssociationExport.scala new file mode 100644 index 00000000..f11ba269 --- /dev/null +++ b/app/models/export/TagInputAssociationExport.scala @@ -0,0 +1,71 @@ +package models.`export` + +import 
anorm.SqlParser.get +import anorm.{RowParser, SQL, ~} +import models.input.InputTagId +import models.rules.CommonRuleFields +import play.api.libs.json._ + +import java.time.LocalDateTime + +case class TagInputAssociationExport(id: InputTagId, + searchInputId: String, + lastUpdate: LocalDateTime) extends JsonExportable { + + def getTableName: JsString = JsString("tag_2_input") + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("tag_id"), + JsString("input_id"), + JsString("last_update") + ) + ) + } + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(searchInputId), + JsString(lastUpdate.toString) + ) + ) + } +} + +object TagInputAssociationExport { + + val TABLE_NAME = "tag_2_input" + val ID = "tag_id" + val SEARCH_INPUT_ID = "input_id" + val LAST_UPDATE = "last_update" + + val sqlParser: RowParser[TagInputAssociationExport] = { + (get[InputTagId](s"$TABLE_NAME.$ID") ~ + get[String](s"$TABLE_NAME.$SEARCH_INPUT_ID") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE")) map { + case + id ~ + searchInputId ~ + lastUpdate + => + TagInputAssociationExport( + id, + searchInputId, + lastUpdate) + } + } + + val selectAllStatement : String = { + s"select $TABLE_NAME.$ID, " + + s"$TABLE_NAME.$SEARCH_INPUT_ID, " + + s"$TABLE_NAME.$LAST_UPDATE from $TABLE_NAME" + } + + def selectStatement(id: String) : String = { + this.selectAllStatement + s" where $TABLE_NAME.$SEARCH_INPUT_ID in (select id from search_input where solr_index_id = '" + id + "')" + } + +} \ No newline at end of file diff --git a/app/models/export/UpDownRuleExport.scala b/app/models/export/UpDownRuleExport.scala new file mode 100644 index 00000000..8343a993 --- /dev/null +++ b/app/models/export/UpDownRuleExport.scala @@ -0,0 +1,100 @@ +package models.export + +import anorm.SqlParser.get +import anorm.{RowParser, ~} +import models.input.SearchInputId +import models.rules.{CommonRuleFields, UpDownRuleId} +import play.api.libs.json._ +import 
java.time.LocalDateTime + +case class UpDownRuleExport(id: UpDownRuleId = UpDownRuleId(), + upDownType: Int, + boostMalusValue: Int, + term: String, + status: Int, + searchInputId: SearchInputId, + lastUpdate: LocalDateTime) extends JsonExportable { + + def getTableName: JsString = JsString("up_down_rule") + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("up_down_type"), + JsString("boost_malus_value"), + JsString("term"), + JsString("search_input_id"), + JsString("last_update"), + JsString("status") + ) + ) + } + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsNumber(upDownType), + JsNumber(boostMalusValue), + JsString(term), + JsString(searchInputId.toString), + JsString(lastUpdate.toString), + JsNumber(status) + ) + ) + } +} + +object UpDownRuleExport extends CommonRuleFields { + + val TABLE_NAME = "up_down_rule" + + val UP_DOWN_TYPE = "up_down_type" + val BOOST_MALUS_VALUE = "boost_malus_value" + + val TYPE_UP = 0 + val TYPE_DOWN = 1 + + implicit val jsonFormat: OFormat[UpDownRuleExport] = Json.format[UpDownRuleExport] + + val sqlParser: RowParser[UpDownRuleExport] = { + (get[UpDownRuleId](s"$TABLE_NAME.$ID") ~ + get[Int](s"$TABLE_NAME.$UP_DOWN_TYPE") ~ + get[Int](s"$TABLE_NAME.$BOOST_MALUS_VALUE") ~ + get[String](s"$TABLE_NAME.$TERM") ~ + get[Int](s"$TABLE_NAME.$STATUS") ~ + get[SearchInputId](s"$TABLE_NAME.$SEARCH_INPUT_ID") ~ + get[LocalDateTime](s"$TABLE_NAME.$LAST_UPDATE")) map { + case id ~ + upDownType ~ + boostMalusValue ~ + term ~ + status ~ + searchInputId ~ + lastUpdate => + UpDownRuleExport(id, + upDownType, + boostMalusValue, + term, + status, + searchInputId, + lastUpdate) + } + } + + val selectAllStatement : String = { + s"select $TABLE_NAME.$ID, " + + s"$TABLE_NAME.$UP_DOWN_TYPE, " + + s"$TABLE_NAME.$BOOST_MALUS_VALUE, " + + s"$TABLE_NAME.$TERM, " + + s"$TABLE_NAME.$STATUS, " + + s"$TABLE_NAME.$SEARCH_INPUT_ID, " + + s"$TABLE_NAME.$LAST_UPDATE from $TABLE_NAME" + } + + def 
selectStatement(id: String) : String = { + this.selectAllStatement + " where search_input_id in (select id from search_input where solr_index_id = '" + id + "')" + } + +} \ No newline at end of file diff --git a/app/models/input/ListItem.scala b/app/models/input/ListItem.scala index 05dc5070..2757ab26 100644 --- a/app/models/input/ListItem.scala +++ b/app/models/input/ListItem.scala @@ -1,3 +1,4 @@ +//CJM 5 package models.input import models.input.ListItemType.ListItemType diff --git a/app/models/input/SearchInput.scala b/app/models/input/SearchInput.scala index 821c8eaa..1713cb63 100644 --- a/app/models/input/SearchInput.scala +++ b/app/models/input/SearchInput.scala @@ -2,13 +2,11 @@ package models.input import java.sql.Connection import java.time.LocalDateTime - -import play.api.libs.json.{Json, OFormat} - +import play.api.libs.json.{JsArray, JsNumber, JsObject, JsString, JsValue, Json, OFormat} import anorm.SqlParser.get import anorm._ - import models._ +import models.`export`.JsonExportable class SearchInputId(id: String) extends Id(id) object SearchInputId extends IdObject[SearchInputId](new SearchInputId(_)) @@ -18,7 +16,7 @@ case class SearchInput(id: SearchInputId = SearchInputId(), term: String, lastUpdate: LocalDateTime, isActive: Boolean, - comment: String) { + comment: String) extends JsonExportable { import SearchInput._ @@ -33,6 +31,34 @@ case class SearchInput(id: SearchInputId = SearchInputId(), COMMENT -> comment ) + def getTableName: JsString = JsString("search_input") + + def getRow: JsValue = { + JsArray( + IndexedSeq ( + JsString(id.toString), + JsString(term), + JsString(solrIndexId.toString), + JsString(lastUpdate.toString), + JsNumber(status), + JsString(comment) + ) + ) + } + + def getColumns: JsValue = { + JsArray( + IndexedSeq ( + JsString("id"), + JsString("term"), + JsString("solr_index_id"), + JsString("last_update"), + JsString("status"), + JsString("comment") + ) + ) + } + } object SearchInput { diff --git 
a/app/models/reports/RulesReport.scala b/app/models/reports/RulesReport.scala index dea64c71..c2750640 100644 --- a/app/models/reports/RulesReport.scala +++ b/app/models/reports/RulesReport.scala @@ -94,6 +94,7 @@ object RulesReport extends Logging { unsortedRules.sortWith((a,b) => (compareRulesReportItem(a,b) < 0)) } + //CJM 7 // TODO write test def loadForSolrIndexId(solrIndexId: SolrIndexId)(implicit connection: Connection): RulesReport = { diff --git a/app/models/rules/DeleteRule.scala b/app/models/rules/DeleteRule.scala index 9118bfb6..cd9151bd 100644 --- a/app/models/rules/DeleteRule.scala +++ b/app/models/rules/DeleteRule.scala @@ -2,9 +2,12 @@ package models.rules import anorm.SqlParser.get import anorm.{RowParser, ~} +import models.input.SearchInputId import models.{Id, IdObject, Status} import play.api.libs.json.{Json, OFormat} +import java.time.LocalDateTime + class DeleteRuleId(id: String) extends Id(id) object DeleteRuleId extends IdObject[DeleteRuleId](new DeleteRuleId(_)) @@ -28,4 +31,5 @@ object DeleteRule extends RuleObjectWithTerm[DeleteRule] { DeleteRule(id, term, Status.isActiveFromStatus(status)) } } + } \ No newline at end of file diff --git a/app/models/rules/FilterRule.scala b/app/models/rules/FilterRule.scala index 7944eddc..14b37fff 100644 --- a/app/models/rules/FilterRule.scala +++ b/app/models/rules/FilterRule.scala @@ -2,8 +2,12 @@ package models.rules import anorm.SqlParser.get import anorm.{RowParser, ~} +import models.input.SearchInputId +import models.rules.DeleteRule.{LAST_UPDATE, SEARCH_INPUT_ID, TABLE_NAME} import models.{Id, IdObject, Status} -import play.api.libs.json.{Json, OFormat} +import play.api.libs.json.{JsArray, JsNumber, JsString, JsValue, Json, OFormat} + +import java.time.LocalDateTime class FilterRuleId(id: String) extends Id(id) object FilterRuleId extends IdObject[FilterRuleId](new FilterRuleId(_)) diff --git a/app/models/rules/RedirectRule.scala b/app/models/rules/RedirectRule.scala index 5f483025..313eb89d 
100644 --- a/app/models/rules/RedirectRule.scala +++ b/app/models/rules/RedirectRule.scala @@ -4,7 +4,9 @@ import anorm.SqlParser.get import anorm.{NamedParameter, RowParser, ~} import models.input.SearchInputId import models.{Id, IdObject, Status} -import play.api.libs.json.{Json, OFormat} +import play.api.libs.json.{JsArray, JsNumber, JsString, JsValue, Json, OFormat} + +import java.time.LocalDateTime class RedirectRuleId(id: String) extends Id(id) object RedirectRuleId extends IdObject[RedirectRuleId](new RedirectRuleId(_)) @@ -19,6 +21,7 @@ case class RedirectRule(id: RedirectRuleId = RedirectRuleId(), RedirectRule.TARGET -> target ) } + } object RedirectRule extends RuleObject[RedirectRule] { diff --git a/app/models/rules/Rule.scala b/app/models/rules/Rule.scala index d3288852..529b1db8 100644 --- a/app/models/rules/Rule.scala +++ b/app/models/rules/Rule.scala @@ -1,11 +1,12 @@ package models.rules -import java.sql.Connection -import java.time.LocalDateTime - import anorm._ import models.Id import models.input.SearchInputId +import play.api.Logging + +import java.sql.Connection +import java.time.LocalDateTime trait Rule { @@ -35,7 +36,7 @@ trait RuleWithTerm extends Rule { } -trait CommonRuleFields { +trait CommonRuleFields extends Logging { val ID = "id" val STATUS = "status" @@ -57,7 +58,7 @@ trait RuleObject[T <: Rule] extends CommonRuleFields { def updateForSearchInput(searchInputId: SearchInputId, rules: Seq[T])(implicit connection: Connection) { // TODO consider to really determine an update/delete diff to ensure that last_update timestamps only updated for affected rules - + logger.debug("RuleObject:updateForSearchInput") SQL"delete from #$TABLE_NAME where #$SEARCH_INPUT_ID = $searchInputId".execute() if (rules.nonEmpty) { @@ -74,6 +75,7 @@ trait RuleObject[T <: Rule] extends CommonRuleFields { } def loadByInputId(searchInputId: SearchInputId)(implicit connection: Connection): List[T] = { + logger.debug("RuleObject:loadByInputId") SQL"select * from 
#$TABLE_NAME where #$SEARCH_INPUT_ID = $searchInputId order by #$orderByField".as(sqlParser.*) } diff --git a/app/models/rules/SynonymRule.scala b/app/models/rules/SynonymRule.scala index c101e9b0..a360abc1 100644 --- a/app/models/rules/SynonymRule.scala +++ b/app/models/rules/SynonymRule.scala @@ -1,13 +1,13 @@ package models.rules -import java.sql.Connection - import anorm.SqlParser.get import anorm._ import models.input.SearchInputId import models.{Id, IdObject, Status} import play.api.libs.json.{Json, OFormat} +import java.sql.Connection + class SynonymRuleId(id: String) extends Id(id) object SynonymRuleId extends IdObject[SynonymRuleId](new SynonymRuleId(_)) @@ -21,6 +21,7 @@ case class SynonymRule(id: SynonymRuleId = SynonymRuleId(), SynonymRule.TYPE -> synonymType ) } + } object SynonymRule extends RuleObjectWithTerm[SynonymRule] { diff --git a/app/models/rules/UpDownRule.scala b/app/models/rules/UpDownRule.scala index 9f446622..fca9f8fa 100644 --- a/app/models/rules/UpDownRule.scala +++ b/app/models/rules/UpDownRule.scala @@ -22,6 +22,7 @@ case class UpDownRule(id: UpDownRuleId = UpDownRuleId(), UpDownRule.UP_DOWN_TYPE -> upDownType ) } + } object UpDownRule extends RuleObjectWithTerm[UpDownRule] { diff --git a/app/models/validatedimport/ValidatedImportData.scala b/app/models/validatedimport/ValidatedImportData.scala new file mode 100644 index 00000000..eb88d8c6 --- /dev/null +++ b/app/models/validatedimport/ValidatedImportData.scala @@ -0,0 +1,206 @@ +package models.validatedimport + +import play.api.Logging +import play.api.libs.json.{JsArray, JsObject, JsValue, Json} + +import java.time.{LocalDateTime, LocalTime} +import java.util +import java.util.UUID +import scala.collection.mutable +import anorm._ + +case class ValidatedImportData(filename: String, content: String) extends Logging { + + var validTableNames: List[String] = List( + "alternative_spelling", + "canonical_spelling", + "synonym_rule", + "up_down_rule", + "redirect_rule", + "search_input", + 
"solr_index", + "delete_rule", + "input_tag", + "tag_2_input", + "suggested_solr_field", + "filter_rule") + + var inputJsonValue: Option[JsValue] = None + var tableCount: Int = 0 + var tableName: String = "" + var columnList: String = "" + var currentColumns: IndexedSeq[String] = IndexedSeq() + var statement: String = "" + var statements: IndexedSeq[String] = IndexedSeq() + var solr_index_id: String = "" + var old_solr_index_id: String = "" + var a_different_existing_solr_index_id: String = UUID.randomUUID().toString + var a_shorthand_id: String = "" + val SUCCESS: Int = 0 + var inputFilename: String = "" + + var allStatementsConcatenated: String = "" + var replacementIds: mutable.HashMap[String, String] = mutable.HashMap() + + def isValid: Boolean = { + parseJson == SUCCESS + } + + def parseJson: Int = { + this.inputFilename = filename + a_different_existing_solr_index_id = UUID.randomUUID().toString + a_shorthand_id = a_different_existing_solr_index_id.substring(0, 8) + logger.debug("ValidatedImportData.parseJson():1 begin parsing") + inputJsonValue = Option(Json.parse(content)) + if (inputJsonValue.isDefined) { + val tables: IndexedSeq[JsValue] = inputJsonValue.get.as[JsArray].value + processTables(tables) + } + logger.debug("ValidatedImportData.parseJson():3 end parsing") + SUCCESS + } + + def processTables(input: IndexedSeq[JsValue]) : Unit = { + if (input.headOption.isEmpty) { + //printStatements(statements) + concatenateStatements(statements) + logger.debug(allStatementsConcatenated) + () + } else { + this.tableName = input.head.\("tableName").as[String] + if (validTableNames.contains(tableName)) { + this.tableCount = this.tableCount + 1 + + this.statement = "INSERT INTO " + tableName + " " + + val columns: IndexedSeq[JsValue] = input.head.\("columns").as[JsArray].value + this.currentColumns = IndexedSeq() + processColumns(columns, 0, (columns.size - 1)) + //logger.debug(currentColumns(1)) + + val rows: IndexedSeq[JsValue] = 
input.head.\("rows").as[JsArray].value + processRows(rows, 0, (rows.size - 1)) + this.statement += ";" + statements = statements :+ statement + //logger.debug(statement) + //logger.debug("tableCount: " + tableCount) + + processTables(input.drop(1)) + } + else { + logger.debug("Unrecognized tableName: " + tableName) + //skip any tables that we don't recognize + processTables(input.drop(1)) + } + } + } + + def processColumns(input: IndexedSeq[JsValue], index: Int, lastIndex: Int): Unit = { + if (input.headOption.isEmpty) { + this.statement += ") VALUES " + () + } else { + if (index == 0) this.statement += "(" + var columnName = input.head.toString().replace("\"", "") + this.currentColumns = this.currentColumns :+ columnName + this.statement += columnName + if (index != lastIndex) this.statement += "," + processColumns(input.drop(1), (index + 1), lastIndex) + } + } + + def processRow(input: IndexedSeq[JsValue], index: Int, lastIndex: Int): Unit = { + if (input.headOption.isEmpty) { + this.statement += ")" + () + } else { + if (index == 0) this.statement += "(" + var rawCellValue = input.head.toString().replace("\"", "") + var cellValue = input.head.toString().replace("\"", "\'") + + if (this.tableName.equals("solr_index")) { + //logger.debug(currentColumns(index)) + if (currentColumns(index).equals("id")) { + this.old_solr_index_id = rawCellValue; + this.solr_index_id = "'" + this.a_different_existing_solr_index_id + "'" + cellValue = solr_index_id + } else if (currentColumns(index).equals("name") || currentColumns(index).equals("description")) { + var old_id_shorthand = this.old_solr_index_id.substring(0, 8) + var sds = shortDistinguishedString + logger.debug("cur col:" + currentColumns(index)) + logger.debug("rawCellValue: " + rawCellValue) + logger.debug("old_id_shorthand: " + old_id_shorthand) + //this.solr_index_id = "'" + this.a_different_existing_solr_index_id + "'" + cellValue = "'" + rawCellValue + ", (file: " + inputFilename + " copied from key: " + 
old_id_shorthand + ") " + sds + "'" + } + } + else if (currentColumns(index).equals("id") || currentColumns(index).equals("search_input_id") || currentColumns(index).equals("input_id")) { + if (!replacementIds.contains(cellValue)) { + replacementIds.put(cellValue, UUID.randomUUID().toString) + } + //logger.debug("replacing " + cellValue) + cellValue = "'" + replacementIds.get(cellValue).head + "'" + //logger.debug("replaced it with :" + cellValue) + } + + if (currentColumns(index).equals("last_update")) { + //ISO_LOCAL_DATE + " " + ISO_LOCAL_TIME + val sqlDate = java.sql.Date.valueOf(LocalDateTime.now.toLocalDate) + val sqlTime = java.sql.Time.valueOf(LocalTime.now) + var isoLocalDateTimeStr = sqlDate + " " + sqlTime + cellValue = "'" + isoLocalDateTimeStr + "'" + } + + if (this.tableName.equals("search_input") || + this.tableName.equals("suggested_solr_field") || + this.tableName.equals("input_tag") + ) { + if (currentColumns(index).equals("solr_index_id")) { + cellValue = "'" + this.a_different_existing_solr_index_id + "'" + } + } + + this.statement += cellValue + if (index != lastIndex) this.statement += "," + + processRow(input.drop(1), (index + 1), lastIndex) + } + } + + def processRows(input: IndexedSeq[JsValue], index: Int, lastIndex: Int): Unit = { + if (input.headOption.isEmpty) { + () + } else { + val row: IndexedSeq[JsValue] = input.head.as[JsArray].value + processRow(row, 0, (row.size - 1)) + if (index != lastIndex) { + this.statement += "," + } + processRows(input.drop(1), (index + 1), lastIndex) + } + } + + def printStatements(input: IndexedSeq[String]): Unit = { + if (input.headOption.isEmpty) { + () + } else { + logger.debug(input.head) + this.allStatementsConcatenated = this.allStatementsConcatenated + input.head + printStatements(input.drop(1)) + } + } + + def concatenateStatements(input: IndexedSeq[String]): Unit = { + if (input.headOption.isEmpty) { + () + } else { + this.allStatementsConcatenated = this.allStatementsConcatenated + " " + 
input.head + printStatements(input.drop(1)) + } + } + + def shortDistinguishedString: String = { + UUID.randomUUID().toString.substring(0, 8) + } + +} \ No newline at end of file diff --git a/app/models/validatedimport/ValidatedImportImporter.scala b/app/models/validatedimport/ValidatedImportImporter.scala new file mode 100644 index 00000000..01ba9c46 --- /dev/null +++ b/app/models/validatedimport/ValidatedImportImporter.scala @@ -0,0 +1,31 @@ +package models.validatedimport + +import anorm.SQL +import models.DatabaseExecutionContext +import models.FeatureToggleModel.FeatureToggleService +import play.api.{Logging, db} +import play.api.db.DBApi + +import javax.inject.Inject + +@javax.inject.Singleton +class ValidatedImportImporter @Inject()(validatedImportData: ValidatedImportData, + dbapi: DBApi, + toggleService: FeatureToggleService) + (implicit ec: DatabaseExecutionContext) extends Logging { + + private val db = dbapi.database("default") + + def performImport(): Unit = db.withTransaction { + implicit connection => { + if (validatedImportData.isValid) { + logger.debug(validatedImportData.allStatementsConcatenated) + validatedImportData.statements.foreach(statement => SQL(statement).execute()) + } + else { + logger.debug("ValidatedImportImporter.performImport():2: oops bad data?") + } + } + } + +} diff --git a/application-clean-run.sh b/application-clean-run.sh new file mode 100755 index 00000000..5612f2dd --- /dev/null +++ b/application-clean-run.sh @@ -0,0 +1,4 @@ +#!/bin/bash +./docker-rm-mysql-containers.sh +./docker-run-mysql-container.sh +source run_helper.sh \ No newline at end of file diff --git a/application-run.sh b/application-run.sh new file mode 100755 index 00000000..93831f91 --- /dev/null +++ b/application-run.sh @@ -0,0 +1,2 @@ +#!/bin/bash +source run_helper.sh \ No newline at end of file diff --git a/conf/logback.xml b/conf/logback.xml index 7831cacc..76f36d43 100644 --- a/conf/logback.xml +++ b/conf/logback.xml @@ -25,7 +25,7 @@ - + @@ -45,7 
+45,7 @@ - + @@ -58,7 +58,7 @@ - + diff --git a/conf/routes b/conf/routes index 0bdf3e89..64596a8a 100644 --- a/conf/routes +++ b/conf/routes @@ -1,3 +1,5 @@ +# CJM + # Routes # This file defines all application routes (Higher priority routes first) # ~~~~ @@ -35,6 +37,8 @@ GET /api/v1/log/rule-activity-log controllers.ApiC GET /api/v1/report/rules-report/:solrIndexId controllers.ApiController.getRulesReport(solrIndexId: String) GET /api/v1/report/activity-report/:solrIndexId controllers.ApiController.getActivityReport(solrIndexId: String) GET /api/v1/version/latest-info controllers.ApiController.getLatestVersionInfo() +GET /api/v1/get-export-with-id/:id controllers.ApiController.getDatabaseJsonWithId(id: String) +POST /api/v1/upload-import controllers.ApiController.uploadImport # Map static resources from the /public folder to the /assets URL path GET /*file controllers.FrontendController.assetOrDefault(file) diff --git a/docker-rm-mysql-containers.sh b/docker-rm-mysql-containers.sh new file mode 100755 index 00000000..f32a7232 --- /dev/null +++ b/docker-rm-mysql-containers.sh @@ -0,0 +1,3 @@ +#!/bin/bash +docker ps -a -q --filter "ancestor=mysql:5.7" | xargs -I {} docker container stop {} +docker ps -a -q --filter "ancestor=mysql:5.7" | xargs -I {} docker container rm {} diff --git a/docker-run-mysql-container.sh b/docker-run-mysql-container.sh new file mode 100755 index 00000000..ead85fad --- /dev/null +++ b/docker-run-mysql-container.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +# make sure to start docker desktop first + +docker run --name smui-mysql -e MYSQL_ROOT_PASSWORD=smui -e MYSQL_USER=smui -e MYSQL_PASSWORD=smui -e MYSQL_DATABASE=smui -p 3306:3306 -d mysql:5.7 \ No newline at end of file diff --git a/docker-what-is-mysql-container-id.sh b/docker-what-is-mysql-container-id.sh new file mode 100755 index 00000000..13af3720 --- /dev/null +++ b/docker-what-is-mysql-container-id.sh @@ -0,0 +1,2 @@ +#!/bin/bash +docker ps -a -q --filter "ancestor=mysql:5.7" \ No newline 
at end of file diff --git a/frontend/src/app/app-routing.module.ts b/frontend/src/app/app-routing.module.ts index 62b5dfa6..b26156ca 100644 --- a/frontend/src/app/app-routing.module.ts +++ b/frontend/src/app/app-routing.module.ts @@ -4,6 +4,7 @@ import { Routes, RouterModule } from '@angular/router'; import { SearchManagementComponent } from './components/search-management'; import { ReportComponent } from './components/report'; import { AdminComponent } from './components/admin'; +import { ImportComponent } from './components/import'; import { SuggestedFieldsComponent } from './components/admin/suggested-fields'; const routes: Routes = [ @@ -11,6 +12,7 @@ const routes: Routes = [ { path: 'rules', component: SearchManagementComponent }, { path: 'report', component: ReportComponent }, { path: 'admin', component: AdminComponent }, + { path: 'import', component: ImportComponent }, { path: 'admin/suggested-fields/:solrIndexId', component: SuggestedFieldsComponent } ]; diff --git a/frontend/src/app/app.module.ts b/frontend/src/app/app.module.ts index 313ea99e..77ef5e99 100644 --- a/frontend/src/app/app.module.ts +++ b/frontend/src/app/app.module.ts @@ -43,6 +43,15 @@ import { SuggestedFieldsCreateComponent, SuggestedFieldsListComponent } from './components/admin/suggested-fields'; + +import { + ImportComponent +} from './components/import'; + +import { + ImportRulesCollectionCreateComponent +} from './components/import/import-rules-collection-create'; + import { ButtonRowComponent, CardComponent, @@ -59,6 +68,7 @@ import { RulesListComponent, RulesSearchComponent } from './components/rules-panel'; +import {FileUploadComponent} from "./components/import/import-rules-collection-create/file-upload.component"; @NgModule({ imports: [ @@ -96,7 +106,10 @@ import { RulesCollectionCreateComponent, SuggestedFieldsComponent, SuggestedFieldsCreateComponent, - SuggestedFieldsListComponent + SuggestedFieldsListComponent, + ImportComponent, + ImportRulesCollectionCreateComponent, + 
FileUploadComponent ], providers: [ CommonsService, diff --git a/frontend/src/app/components/admin/admin.component.ts b/frontend/src/app/components/admin/admin.component.ts index cc66c607..42a87837 100644 --- a/frontend/src/app/components/admin/admin.component.ts +++ b/frontend/src/app/components/admin/admin.component.ts @@ -38,7 +38,6 @@ export class AdminComponent implements OnInit { this.toasterService.pop('error', '', msgText); } - // @ts-ignore public openDeleteConfirmModal({ deleteCallback }) { const deferred = this.modalService.open('confirm-delete'); diff --git a/frontend/src/app/components/admin/rules-collection-create/rules-collection-create.component.ts b/frontend/src/app/components/admin/rules-collection-create/rules-collection-create.component.ts index ea64f919..10eccfb9 100644 --- a/frontend/src/app/components/admin/rules-collection-create/rules-collection-create.component.ts +++ b/frontend/src/app/components/admin/rules-collection-create/rules-collection-create.component.ts @@ -64,9 +64,16 @@ export class RulesCollectionCreateComponent implements OnInit, OnChanges { .then(() => this.showSuccessMsg.emit("Created new Rules Collection " + this.description)) .then(() => this.solrService.emitRulesCollectionChangeEvent("")) .then(() => this.clearForm()) - .catch(error => this.showErrorMsg.emit(error)); + .catch(error => { + console.log(error); + var errorMsg = 'Unknown Error' + if ('message' in error.error) { + errorMsg = error.error.message; + } + this.showErrorMsg.emit(errorMsg); + }); + } else { + this.showErrorMsg.emit("Fill in both name fields."); } } - - } diff --git a/frontend/src/app/components/admin/rules-collection-list/rules-collection-list.component.html b/frontend/src/app/components/admin/rules-collection-list/rules-collection-list.component.html index f31e294b..cfff05cc 100644 --- a/frontend/src/app/components/admin/rules-collection-list/rules-collection-list.component.html +++ 
b/frontend/src/app/components/admin/rules-collection-list/rules-collection-list.component.html @@ -6,20 +6,50 @@
Available Rules Collections in SMUI
  • - {{ solrIndex.description }} ({{ solrIndex.name }})- Key: {{ solrIndex.id }} - - Configure - - + Description: {{ solrIndex.description }}
    + Name: {{ solrIndex.name }}
    + Key: {{ solrIndex.id }}
    + +
    + + + +   + + + +   + + +   +
    +
  • diff --git a/frontend/src/app/components/admin/rules-collection-list/rules-collection-list.component.ts b/frontend/src/app/components/admin/rules-collection-list/rules-collection-list.component.ts index fe6aedc7..f0bd7fb1 100644 --- a/frontend/src/app/components/admin/rules-collection-list/rules-collection-list.component.ts +++ b/frontend/src/app/components/admin/rules-collection-list/rules-collection-list.component.ts @@ -8,11 +8,21 @@ import { SimpleChanges } from '@angular/core'; -import { SolrIndex } from '../../../models'; +import { SolrIndex, RulesReport } from '../../../models'; + +import { ToasterService } from 'angular2-toaster'; + + +import { DownloadableRule } from '../../../models/downloadableRule.model'; +import { DownloadableRules } from '../../../models/downloadableRules.model'; + import { + ReportService, SolrService, ModalService } from '../../../services'; +import {HttpClient} from "@angular/common/http"; +import {Subject} from "rxjs"; @Component({ selector: 'app-smui-admin-rules-collection-list', @@ -24,10 +34,26 @@ export class RulesCollectionListComponent implements OnInit, OnChanges { @Output() showErrorMsg: EventEmitter = new EventEmitter(); @Output() showSuccessMsg: EventEmitter = new EventEmitter(); @Output() solrIndicesChange: EventEmitter = new EventEmitter(); + rulesReport?: RulesReport; + downloadableRules?: DownloadableRules; + //savedOutput?: ListItem; //CJM + private readonly baseUrl = 'api/v1'; + + currentSolrIndexId = '-1'; + currentSolrIndexIdSubject: Subject = new Subject(); + collectionName: String; constructor( private solrService: SolrService, + private toasterService: ToasterService, + private reportService: ReportService, + private http: HttpClient ) { + + this.currentSolrIndexIdSubject.subscribe( + value => (this.currentSolrIndexId = value) + ); + } getSolrIndices() { @@ -42,6 +68,42 @@ export class RulesCollectionListComponent implements OnInit, OnChanges { console.log('In RulesCollectionListComponent :: ngOnChanges'); 
} + getSuggestedFields(solrIndex: SolrIndex) { + var suggestedFields1 = null; + console.log("solrIndex.id is: " + solrIndex.id); + this.solrService.getSuggestedFields(solrIndex.id) + .then(suggestedFields => { + console.log('got here'); + suggestedFields1 = suggestedFields; + console.log(suggestedFields1); + }) + .catch(error => this.showErrorMsg.emit(error)); + } + + downloadRulesCollectionExport(id:String, event: Event) { + console.log("download()"); + this.solrService.getSolrIndex(id.toString()).then(solrIndex => this.collectionName = solrIndex.name); + this.solrService.getExportWithId(id).then( + result => { + var str = JSON.stringify(result); + this.downloadStringAsFile( + this.collectionName + ".json.txt", + str); + this.showSuccessMsg.emit("Download: OK") + } + ); + } + + downloadStringAsFile(filename: string, text: string) { + var element = document.createElement('a'); + element.setAttribute('href', 'data:text/plain;charset=utf-8,' + encodeURIComponent(text)); + element.setAttribute('download', filename); + element.style.display = 'none'; + document.body.appendChild(element); + element.click(); + document.body.removeChild(element); + } + deleteRulesCollection(id: string, event: Event) { event.stopPropagation(); const deleteCallback = () => @@ -62,4 +124,5 @@ export class RulesCollectionListComponent implements OnInit, OnChanges { this.openDeleteConfirmModal.emit({ deleteCallback }); } + } diff --git a/frontend/src/app/components/header-nav/header-nav.component.html b/frontend/src/app/components/header-nav/header-nav.component.html index 5b0ecaaf..6761c06b 100644 --- a/frontend/src/app/components/header-nav/header-nav.component.html +++ b/frontend/src/app/components/header-nav/header-nav.component.html @@ -63,6 +63,14 @@ >Admin +
  • + +   +   + + {{fileName || "No file imported yet."}} {{fileMessage}} + + + + + diff --git a/frontend/src/app/components/import/import-rules-collection-create/file-upload.component.scss b/frontend/src/app/components/import/import-rules-collection-create/file-upload.component.scss new file mode 100644 index 00000000..8b9b1a2d --- /dev/null +++ b/frontend/src/app/components/import/import-rules-collection-create/file-upload.component.scss @@ -0,0 +1,3 @@ +.file-input { + display: none; +} diff --git a/frontend/src/app/components/import/import-rules-collection-create/file-upload.component.ts b/frontend/src/app/components/import/import-rules-collection-create/file-upload.component.ts new file mode 100644 index 00000000..7d0b902d --- /dev/null +++ b/frontend/src/app/components/import/import-rules-collection-create/file-upload.component.ts @@ -0,0 +1,62 @@ +import {Component, ElementRef, EventEmitter, Output, ViewChild} from "@angular/core"; +import {HttpClient} from "@angular/common/http"; +import {ToasterService} from "angular2-toaster"; +import { + SolrService, + ModalService +} from '../../../services'; +import {SolrIndex} from "../../../models"; +import {RulesCollectionCreateComponent} from "../../admin"; + +@Component({ + selector: 'file-upload', + templateUrl: "file-upload.component.html", + styleUrls: ["file-upload.component.scss"] +}) +export class FileUploadComponent { + fileName = ''; + fileMessage = ''; + target: EventTarget | null; + + @ViewChild('fileUpload') + myInputVariable: ElementRef; + @Output() solrIndicesChange: EventEmitter = new EventEmitter(); + @Output() showErrorMsg: EventEmitter = new EventEmitter(); + @Output() refreshRulesCollectionList: EventEmitter = new EventEmitter(); + + constructor(private http: HttpClient, + private toasterService: ToasterService, + private solrService: SolrService) { + } + + public showSuccessMsg(msgText: string) { + this.toasterService.pop('success', '', msgText); + } + + refreshSolrIndicies() { + return 
this.solrService.listAllSolrIndices; + } + + onFileSelected(event: Event) { + const target = event.target as HTMLInputElement; + if (target.files != null) { + const file: File = target.files[0]; + if (file) { + this.fileName = file.name; + this.fileMessage = '... processing ...'; + const formData = new FormData(); + formData.append("uploadedFile", file); + this.showSuccessMsg(this.fileName + this.fileMessage); + const upload$ = this.http.post("/api/v1/upload-import", formData).toPromise() + .then(() => { + this.myInputVariable.nativeElement.value = ''; + this.refreshSolrIndicies(); + }) + .then(() => { this.solrService.listAllSolrIndices().then(() => this.solrService.emitRulesCollectionChangeEvent("")); }) + .then(() => { this.fileMessage = '... Done Processing.'; }) + .then(() => { this.showSuccessMsg("Imported: " + this.fileName); }) + } + } + } + +} diff --git a/frontend/src/app/components/import/import-rules-collection-create/import-rules-collection-create.component.html b/frontend/src/app/components/import/import-rules-collection-create/import-rules-collection-create.component.html new file mode 100644 index 00000000..ceb1bace --- /dev/null +++ b/frontend/src/app/components/import/import-rules-collection-create/import-rules-collection-create.component.html @@ -0,0 +1,94 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +

    Import

    + + + + + + + + + + + + + + + + + + diff --git a/frontend/src/app/components/import/import-rules-collection-create/import-rules-collection-create.component.ts b/frontend/src/app/components/import/import-rules-collection-create/import-rules-collection-create.component.ts new file mode 100644 index 00000000..3f694872 --- /dev/null +++ b/frontend/src/app/components/import/import-rules-collection-create/import-rules-collection-create.component.ts @@ -0,0 +1,130 @@ +import { + Component, + Input, + Output, + EventEmitter, + OnChanges, + OnInit, + SimpleChanges +} from '@angular/core'; + +import { SolrIndex } from '../../../models'; +import { + SolrService, + ModalService +} from '../../../services'; + +import {FileUploadComponent} from "./file-upload.component"; + +@Component({ + selector: 'app-smui-import-import-rules-collection-create', + templateUrl: './import-rules-collection-create.component.html' +}) +export class ImportRulesCollectionCreateComponent implements OnInit, OnChanges { + + //@Output() updateRulesCollectionList: EventEmitter<> = new EventEmitter(); + @Output() showErrorMsg: EventEmitter = new EventEmitter(); + @Output() showSuccessMsg: EventEmitter = new EventEmitter(); + @Output() refreshImportRulesCollectionList: EventEmitter = new EventEmitter(); + @Output() solrIndicesChange: EventEmitter = new EventEmitter(); + fuc: FileUploadComponent; + + solrIndices: SolrIndex[]; + valueName: string; + + constructor( + private solrService: SolrService, + ) { + + } + ngOnInit() { + console.log('In ImportRulesCollectionCreateComponent :: ngOnInit'); + this.solrIndices = this.solrService.solrIndices; + } + + ngOnChanges(changes: SimpleChanges): void { + console.log('In ImportRulesCollectionCreateComponent :: ngOnChanges'); + } + + refreshSolrIndicies() { + return this.solrService.listAllSolrIndices; + // : Promise.reject('No selected Solr index'); + } + + clearForm() { + this.valueName = ''; + } + + addOneValue( event: Event){ + if (this.valueName) { + 
console.log("addOneValue() , thingName: " + this.valueName); + //console.log('In ImportRulesCollectidonCreateComponent :: createImportRulesCollection'); + this.solrService.putSomething2(this.valueName) + .then(() => this.showSuccessMsg.emit("Add Value: OK")); + // + // if (this.name && this.description) { + // this.solrService + // .createSolrIndex(this.name, this.description) + // .then(() => this.solrService.listAllSolrIndices()) + // .then(() => this.solrIndicesChange.emit()) + // .then(() => this.showSuccessMsg.emit("Created new Import Rules Collection " + this.description)) + // .then(() => this.solrService.emitRulesCollectionChangeEvent("")) + // .then(() => this.clearForm()) + // .catch(error => this.showErrorMsg.emit(error)); + // } + } else { + this.showErrorMsg.emit("Add Value: oops"); + } + } + + putRows(event: Event) { + this.solrService.putSomething2("test row 1").then(() => console.log("added test row 1")); + this.solrService.putSomething2("test row 2").then(() => console.log("added test row 2")); + this.solrService.putSomething2("test row 3").then(() => console.log("added test row 3")); + this.showSuccessMsg.emit("Put Rows: OK") + } + + getRows(event: Event) { + console.log("getRows()"); + //this.solrService.putSomething2("something3").then(() => console.log("done")); + //this.solrService.putSomething2("something4").then(() => console.log("done")); + this.solrService.getExport().then( + result => { + console.log(result) + this.showSuccessMsg.emit("Get Rows: OK, see console log") + } + ) + } + + download(event: Event) { + console.log("download()"); + this.solrService.getExport().then( + result => { + var str = JSON.stringify(result); + this.downloadStringAsFile( + "file.json.txt", + str); + this.showSuccessMsg.emit("Download: OK") + } + ); + } + + downloadStringAsFile(filename: string, text: string) { + var element = document.createElement('a'); + element.setAttribute('href', 'data:text/plain;charset=utf-8,' + encodeURIComponent(text)); + 
element.setAttribute('download', filename); + element.style.display = 'none'; + document.body.appendChild(element); + element.click(); + document.body.removeChild(element); + } + + onFileSelected(event: Event) { + console.log("event:" + event.target); + } + + putty(event: Event) { + this.solrService.putty().then(() => console.log("did putty")); + this.showSuccessMsg.emit("Putty: OK") + } +} diff --git a/frontend/src/app/components/import/import-rules-collection-create/index.ts b/frontend/src/app/components/import/import-rules-collection-create/index.ts new file mode 100644 index 00000000..0a267e3f --- /dev/null +++ b/frontend/src/app/components/import/import-rules-collection-create/index.ts @@ -0,0 +1 @@ +export * from './import-rules-collection-create.component'; diff --git a/frontend/src/app/components/import/import.component.html b/frontend/src/app/components/import/import.component.html new file mode 100644 index 00000000..6dcb1746 --- /dev/null +++ b/frontend/src/app/components/import/import.component.html @@ -0,0 +1,15 @@ + + + + +

    Are you sure you want to delete the item?

    +
    diff --git a/frontend/src/app/components/import/import.component.ts b/frontend/src/app/components/import/import.component.ts new file mode 100644 index 00000000..e91a736b --- /dev/null +++ b/frontend/src/app/components/import/import.component.ts @@ -0,0 +1,55 @@ +import { Component, OnInit, Input } from '@angular/core'; + +import { ToasterService } from 'angular2-toaster'; + +import { SolrIndex } from '../../models'; + +import { + SolrService, + ModalService + } from '../../services'; + +@Component({ + selector: 'app-smui-import', + templateUrl: './import.component.html' +}) +export class ImportComponent implements OnInit { + + constructor( + private modalService: ModalService, + private toasterService: ToasterService, + private solrService: SolrService + ) { + + } + + solrIndices: SolrIndex[]; + + ngOnInit() { + console.log('In ImportComponent :: ngOnInit'); + this.solrIndices = this.solrService.solrIndices; + } + + public showSuccessMsg(msgText: string) { + this.toasterService.pop('success', '', msgText); + } + + public showErrorMsg(msgText: string) { + this.toasterService.pop('error', '', msgText); + } + + // @ts-ignore + public openDeleteConfirmModal({ deleteCallback }) { + const deferred = this.modalService.open('confirm-delete'); + deferred.promise.then((isOk: boolean) => { + if (isOk) { deleteCallback(); } + this.modalService.close('confirm-delete'); + }); + } + + public solrIndicesChange(id: string){ + console.log("ImportComponent :: solrIndicesChange :: id = " + id) + this.solrIndices = this.solrService.solrIndices; + } + +} diff --git a/frontend/src/app/components/import/index.ts b/frontend/src/app/components/import/index.ts new file mode 100644 index 00000000..bdc340d5 --- /dev/null +++ b/frontend/src/app/components/import/index.ts @@ -0,0 +1 @@ +export * from './import.component'; diff --git a/frontend/src/app/models/downloadableRule.model.ts b/frontend/src/app/models/downloadableRule.model.ts new file mode 100644 index 00000000..07c891da --- 
/dev/null +++ b/frontend/src/app/models/downloadableRule.model.ts @@ -0,0 +1,17 @@ +import {FilterRule, RedirectRule} from "./rules.model"; + +export class DownloadableRule { + + type?: string; + filterRule?: FilterRule; + redirectRule?: RedirectRule; + + inputId?: string; + inputTerm?: string; + term?: string; + details?: string; + isActive?: boolean; + inputTags: Array; + modified: string; + inputModified: string; +} diff --git a/frontend/src/app/models/downloadableRules.model.ts b/frontend/src/app/models/downloadableRules.model.ts new file mode 100644 index 00000000..636d6332 --- /dev/null +++ b/frontend/src/app/models/downloadableRules.model.ts @@ -0,0 +1,6 @@ +import { DownloadableRule } from './downloadableRule.model'; + +export class DownloadableRules { + id?: string; + rules?: Array; +} diff --git a/frontend/src/app/services/solr.service.ts b/frontend/src/app/services/solr.service.ts index a10b6290..426768f9 100644 --- a/frontend/src/app/services/solr.service.ts +++ b/frontend/src/app/services/solr.service.ts @@ -112,7 +112,6 @@ export class SolrService { .toPromise(); } - createSuggestedField(solrIndexId: string, name: string): Promise { const headers = { headers: this.jsonHeader }; const body = JSON.stringify( { name: name}); @@ -159,4 +158,36 @@ export class SolrService { .toPromise(); } + putSomething2(thingName: string) { + const headers = { headers: this.jsonHeader }; + const body = JSON.stringify( { thingName: thingName }); + return this.http + .put(`${this.baseUrl}/put-something`, body, httpOptions) + .toPromise(); + } + + putty() { + const headers = { headers: this.jsonHeader }; + const body = JSON.stringify( { }); + return this.http + .put(`${this.baseUrl}/putty`, body, httpOptions) + .toPromise(); + } + + getExport() { + const headers = { headers: this.jsonHeader }; + const body = {}; + return this.http + .get(`${this.baseUrl}/get-export`, body) + .toPromise(); + } + + getExportWithId(id:String ) { + const headers = { headers: this.jsonHeader }; 
+ const body = {}; + return this.http + .get(`${this.baseUrl}/get-export-with-id/${id}`, body) + .toPromise(); + } + } diff --git a/frontend/src/styles.css b/frontend/src/styles.css index e0ff8ad0..d6c42bee 100644 --- a/frontend/src/styles.css +++ b/frontend/src/styles.css @@ -2,3 +2,18 @@ @import '~angular2-toaster/toaster.css'; @import '~angular2-multiselect-dropdown/themes/default.theme.css'; @import '~@fortawesome/fontawesome-free/css/all.css'; + +div.horizontalgap { + float: left; + overflow: hidden; + height: 1px; + width: 0px; +} + +.float-right-top { + position: absolute; + right: 0; + top: 0; + padding-top:10px; + padding-right:10px; +} diff --git a/run_helper.sh b/run_helper.sh new file mode 100755 index 00000000..b17f665f --- /dev/null +++ b/run_helper.sh @@ -0,0 +1,8 @@ +#!/bin/bash +source ~/.zshrc +export SMUI_TOGGLE_SPELLING=true +export SMUI_TOGGLE_EVENTHISTORY=true +#sbt "run -Dconfig.file=./smui-dev.conf 9000" +#sbt "run -Dconfig.file=./smui-dev.conf 9000" +sbt run "-Dconfig.file=./smui-dev.conf 9000" +#gopen http://localhost:9000 \ No newline at end of file