From fe4ca88a53250eb761a5f6f6ab126ca49376f7e3 Mon Sep 17 00:00:00 2001
From: Konstantin Schulz
Date: Mon, 25 May 2020 12:24:37 +0200
Subject: [PATCH] Moved additional classes to the OpenAPI specification
---
mc_backend/.coveragerc | 1 +
mc_backend/README.md | 4 +
.../csm/app/api/corpusStorageManagerAPI.py | 4 +-
mc_backend/csm/app/api/subgraphAPI.py | 15 +-
mc_backend/csm/app/api/textcomplexityAPI.py | 7 +-
mc_backend/mcserver/app/__init__.py | 18 +-
mc_backend/mcserver/app/api/exerciseAPI.py | 14 +-
.../mcserver/app/api/exerciseListAPI.py | 1 -
mc_backend/mcserver/app/api/rawTextAPI.py | 8 +-
.../mcserver/app/api/textcomplexityAPI.py | 5 +-
mc_backend/mcserver/app/api/vocabularyAPI.py | 14 +-
mc_backend/mcserver/app/models.py | 214 +---
.../app/services/annotationService.py | 2 +-
.../mcserver/app/services/corpusService.py | 31 +-
.../mcserver/app/services/databaseService.py | 14 +-
.../mcserver/app/services/fileService.py | 6 +-
.../mcserver/app/services/frequencyService.py | 2 +-
.../app/services/textComplexityService.py | 4 +-
.../mcserver/app/services/textService.py | 2 +-
mc_backend/mcserver/config.py | 9 +-
mc_backend/mcserver/mcserver_api.yaml | 361 ++++++-
mc_backend/mocks.py | 25 +-
mc_backend/openapi/.dockerignore | 72 ++
mc_backend/openapi/.gitignore | 66 ++
mc_backend/openapi/.openapi-generator-ignore | 23 +
mc_backend/openapi/.openapi-generator/VERSION | 1 +
mc_backend/openapi/.travis.yml | 14 +
mc_backend/openapi/Dockerfile | 16 +
mc_backend/openapi/README.md | 49 +
mc_backend/openapi/git_push.sh | 58 ++
mc_backend/openapi/openapi_server/__init__.py | 0
mc_backend/openapi/openapi_server/__main__.py | 18 +
.../openapi_server/controllers/__init__.py | 0
.../controllers/default_controller.py | 94 ++
.../controllers/security_controller_.py | 3 +
mc_backend/openapi/openapi_server/encoder.py | 20 +
.../openapi/openapi_server/models/__init__.py | 19 +
.../openapi_server/models/annis_response.py | 238 +++++
.../annis_response_frequency_analysis.py | 122 +++
.../openapi_server/models/base_model_.py | 69 ++
.../openapi/openapi_server/models/corpus.py | 236 +++++
.../openapi/openapi_server/models/exercise.py | 522 ++++++++++
.../openapi_server/models/exercise_all_of.py | 294 ++++++
.../openapi_server/models/exercise_base.py | 262 +++++
.../openapi_server/models/graph_data.py | 186 ++++
.../models/inline_response200.py | 354 +++++++
.../inline_response200_frequency_analysis.py | 122 +++
.../inline_response200_text_complexity.py | 462 +++++++++
.../openapi_server/models/learning_result.py | 672 +++++++++++++
.../openapi/openapi_server/models/link.py | 186 ++++
.../openapi/openapi_server/models/node.py | 360 +++++++
.../openapi/openapi_server/models/solution.py | 92 ++
.../openapi_server/models/solution_element.py | 156 +++
.../openapi_server/models/text_complexity.py | 462 +++++++++
.../openapi_server/models/update_info.py | 132 +++
.../openapi_server/openapi/openapi.yaml | 949 ++++++++++++++++++
.../openapi/openapi_server/test/__init__.py | 16 +
.../test/test_default_controller.py | 123 +++
.../openapi/openapi_server/typing_utils.py | 32 +
mc_backend/openapi/openapi_server/util.py | 142 +++
mc_backend/openapi/requirements.txt | 10 +
mc_backend/openapi/setup.py | 39 +
mc_backend/openapi/test-requirements.txt | 4 +
mc_backend/openapi/tox.ini | 11 +
mc_backend/openapi_generator.py | 15 +
mc_backend/tests.py | 271 +++--
66 files changed, 7283 insertions(+), 470 deletions(-)
create mode 100644 mc_backend/openapi/.dockerignore
create mode 100644 mc_backend/openapi/.gitignore
create mode 100644 mc_backend/openapi/.openapi-generator-ignore
create mode 100644 mc_backend/openapi/.openapi-generator/VERSION
create mode 100644 mc_backend/openapi/.travis.yml
create mode 100644 mc_backend/openapi/Dockerfile
create mode 100644 mc_backend/openapi/README.md
create mode 100644 mc_backend/openapi/git_push.sh
create mode 100644 mc_backend/openapi/openapi_server/__init__.py
create mode 100644 mc_backend/openapi/openapi_server/__main__.py
create mode 100644 mc_backend/openapi/openapi_server/controllers/__init__.py
create mode 100644 mc_backend/openapi/openapi_server/controllers/default_controller.py
create mode 100644 mc_backend/openapi/openapi_server/controllers/security_controller_.py
create mode 100644 mc_backend/openapi/openapi_server/encoder.py
create mode 100644 mc_backend/openapi/openapi_server/models/__init__.py
create mode 100644 mc_backend/openapi/openapi_server/models/annis_response.py
create mode 100644 mc_backend/openapi/openapi_server/models/annis_response_frequency_analysis.py
create mode 100644 mc_backend/openapi/openapi_server/models/base_model_.py
create mode 100644 mc_backend/openapi/openapi_server/models/corpus.py
create mode 100644 mc_backend/openapi/openapi_server/models/exercise.py
create mode 100644 mc_backend/openapi/openapi_server/models/exercise_all_of.py
create mode 100644 mc_backend/openapi/openapi_server/models/exercise_base.py
create mode 100644 mc_backend/openapi/openapi_server/models/graph_data.py
create mode 100644 mc_backend/openapi/openapi_server/models/inline_response200.py
create mode 100644 mc_backend/openapi/openapi_server/models/inline_response200_frequency_analysis.py
create mode 100644 mc_backend/openapi/openapi_server/models/inline_response200_text_complexity.py
create mode 100644 mc_backend/openapi/openapi_server/models/learning_result.py
create mode 100644 mc_backend/openapi/openapi_server/models/link.py
create mode 100644 mc_backend/openapi/openapi_server/models/node.py
create mode 100644 mc_backend/openapi/openapi_server/models/solution.py
create mode 100644 mc_backend/openapi/openapi_server/models/solution_element.py
create mode 100644 mc_backend/openapi/openapi_server/models/text_complexity.py
create mode 100644 mc_backend/openapi/openapi_server/models/update_info.py
create mode 100644 mc_backend/openapi/openapi_server/openapi/openapi.yaml
create mode 100644 mc_backend/openapi/openapi_server/test/__init__.py
create mode 100644 mc_backend/openapi/openapi_server/test/test_default_controller.py
create mode 100644 mc_backend/openapi/openapi_server/typing_utils.py
create mode 100644 mc_backend/openapi/openapi_server/util.py
create mode 100644 mc_backend/openapi/requirements.txt
create mode 100644 mc_backend/openapi/setup.py
create mode 100644 mc_backend/openapi/test-requirements.txt
create mode 100644 mc_backend/openapi/tox.ini
create mode 100644 mc_backend/openapi_generator.py
diff --git a/mc_backend/.coveragerc b/mc_backend/.coveragerc
index badc0f3..652a2c4 100644
--- a/mc_backend/.coveragerc
+++ b/mc_backend/.coveragerc
@@ -7,6 +7,7 @@ omit =
*/site-packages/*
*/migrations/*
# cannot run tests for files that are generated and updated automatically
+ */openapi/*
*/models_auto.py
parallel = True
diff --git a/mc_backend/README.md b/mc_backend/README.md
index 3b0c197..e29a4d7 100644
--- a/mc_backend/README.md
+++ b/mc_backend/README.md
@@ -43,6 +43,10 @@ To autogenerate a new migration script:
----------------------------------------------------------------
+# Models
+To generate class structures for this project automatically:
+1. Install OpenAPI Generator (using, e.g., `brew install openapi-generator`).
+2. Run: `openapi-generator generate -i ./mcserver/mcserver_api.yaml -g python-flask -o ./openapi/ && python openapi_generator.py`.
# Testing
To check the coverage of the current tests, run
`coverage run --rcfile=.coveragerc tests.py && coverage combine && coverage report -m`.
diff --git a/mc_backend/csm/app/api/corpusStorageManagerAPI.py b/mc_backend/csm/app/api/corpusStorageManagerAPI.py
index 3a24a61..678cc84 100644
--- a/mc_backend/csm/app/api/corpusStorageManagerAPI.py
+++ b/mc_backend/csm/app/api/corpusStorageManagerAPI.py
@@ -36,9 +36,9 @@ class CorpusStorageManagerAPI(Resource):
args: Dict = flask.request.args
cts_urn: str = args["urn"]
ar: AnnisResponse = CorpusService.get_corpus(cts_urn=cts_urn, is_csm=True)
- if not ar.nodes:
+ if not ar.graph_data.nodes:
abort(404)
- return NetworkService.make_json_response(ar.__dict__)
+ return NetworkService.make_json_response(ar.to_dict())
def post(self):
"""Given the relevant corpus data, gives back search results as graph data."""
diff --git a/mc_backend/csm/app/api/subgraphAPI.py b/mc_backend/csm/app/api/subgraphAPI.py
index 0c3be6c..2f7a527 100644
--- a/mc_backend/csm/app/api/subgraphAPI.py
+++ b/mc_backend/csm/app/api/subgraphAPI.py
@@ -1,12 +1,9 @@
import json
from typing import Dict, List
-
import flask
from flask_restful import Resource
from flask_restful.reqparse import RequestParser
-
-from mcserver.app.models import ExerciseData, GraphData, Solution, SolutionElement, AnnisResponse
-
+from mcserver.app.models import ExerciseData, GraphData, Solution, AnnisResponse, make_solution_element_from_salt_id
from mcserver.app.services import CorpusService, AnnotationService, NetworkService
@@ -30,7 +27,7 @@ class SubgraphAPI(Resource):
ctx_left: int = int(args["ctx_left"])
ctx_right: int = int(args["ctx_right"])
ar: AnnisResponse = CorpusService.get_subgraph(urn, aql, ctx_left, ctx_right, is_csm=True)
- return NetworkService.make_json_response(ar.__dict__)
+ return NetworkService.make_json_response(ar.to_dict())
def post(self):
""" Returns subgraph data for a given CTS URN and AQL. """
@@ -45,9 +42,9 @@ class SubgraphAPI(Resource):
for aql in aqls:
node_ids: List[str] = CorpusService.find_matches(cts_urn, aql, is_csm=True)
for node_id in node_ids:
- gd: GraphData = AnnotationService.get_single_subgraph(disk_urn, [node_id], ctx_left, ctx_right,
- is_csm=True)
- exercise_data_list.append(ExerciseData(graph=gd, uri="",
- solutions=[Solution(target=SolutionElement(salt_id=node_id))]))
+ gd: GraphData = AnnotationService.get_single_subgraph(
+ disk_urn, [node_id], ctx_left, ctx_right, is_csm=True)
+ exercise_data_list.append(ExerciseData(
+ graph=gd, uri="", solutions=[Solution(target=make_solution_element_from_salt_id(node_id))]))
ret_val: List[dict] = [x.serialize() for x in exercise_data_list]
return NetworkService.make_json_response(ret_val)
diff --git a/mc_backend/csm/app/api/textcomplexityAPI.py b/mc_backend/csm/app/api/textcomplexityAPI.py
index 021fa52..41c3b4a 100644
--- a/mc_backend/csm/app/api/textcomplexityAPI.py
+++ b/mc_backend/csm/app/api/textcomplexityAPI.py
@@ -24,7 +24,6 @@ class TextComplexityAPI(Resource):
urn: str = args["urn"]
measure: str = args["measure"]
ar_dict: dict = args.get("annis_response", None)
- ar: AnnisResponse = AnnisResponse(json_dict=ar_dict) if ar_dict else CorpusService.get_corpus(urn, is_csm=True)
- gd: GraphData = GraphData(json_dict=ar.__dict__)
- tc: TextComplexity = TextComplexityService.text_complexity(measure, urn, True, gd)
- return NetworkService.make_json_response(tc.__dict__)
+ ar: AnnisResponse = AnnisResponse.from_dict(ar_dict) if ar_dict else CorpusService.get_corpus(urn, is_csm=True)
+ tc: TextComplexity = TextComplexityService.text_complexity(measure, urn, True, ar.graph_data)
+ return NetworkService.make_json_response(tc.to_dict())
diff --git a/mc_backend/mcserver/app/__init__.py b/mc_backend/mcserver/app/__init__.py
index 76f2e91..925acfb 100644
--- a/mc_backend/mcserver/app/__init__.py
+++ b/mc_backend/mcserver/app/__init__.py
@@ -5,23 +5,24 @@ import sys
from logging.handlers import RotatingFileHandler
from threading import Thread
from time import strftime
-from typing import Type, List
+from typing import Type
import connexion
import flask
+import open_alchemy
from connexion import FlaskApp
from flask import Flask, got_request_exception, request, Response, send_from_directory
from flask_cors import CORS
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from open_alchemy import init_yaml
-
from mcserver.config import Config
db: SQLAlchemy = SQLAlchemy() # session_options={"autocommit": True}
migrate: Migrate = Migrate(directory=Config.MIGRATIONS_DIRECTORY)
-# do this _BEFORE_ you add any APIs to your application
-init_yaml(Config.API_SPEC_FILE_PATH, base=db.Model,
- models_filename=os.path.join(Config.MC_SERVER_DIRECTORY, "models_auto.py"))
+if not hasattr(open_alchemy.models, Config.DATABASE_TABLE_CORPUS):
+ # do this _BEFORE_ you add any APIs to your application
+ init_yaml(Config.API_SPEC_YAML_FILE_PATH, base=db.Model,
+ models_filename=os.path.join(Config.MC_SERVER_DIRECTORY, "models_auto.py"))
def apply_event_handlers(app: FlaskApp):
@@ -78,7 +79,7 @@ def init_app_common(cfg: Type[Config] = Config, is_csm: bool = False) -> Flask:
connexion_app: FlaskApp = connexion.FlaskApp(
__name__, port=(cfg.CORPUS_STORAGE_MANAGER_PORT if is_csm else cfg.HOST_PORT),
specification_dir=Config.MC_SERVER_DIRECTORY)
- connexion_app.add_api(Config.API_SPEC_FILE_PATH, arguments={'title': 'Machina Callida Backend REST API'})
+ connexion_app.add_api(Config.API_SPEC_YAML_FILE_PATH, arguments={'title': 'Machina Callida Backend REST API'})
apply_event_handlers(connexion_app)
app: Flask = connexion_app.app
# allow CORS requests for all API routes
@@ -87,11 +88,11 @@ def init_app_common(cfg: Type[Config] = Config, is_csm: bool = False) -> Flask:
app.app_context().push()
db.init_app(app)
migrate.init_app(app, db)
+ if is_csm or cfg.TESTING:
+ db.create_all()
if is_csm:
from mcserver.app.services.databaseService import DatabaseService
DatabaseService.init_db_alembic()
- if is_csm or cfg.TESTING:
- db.create_all()
from mcserver.app.services.textService import TextService
TextService.init_proper_nouns_list()
TextService.init_stop_words_latin()
@@ -119,7 +120,6 @@ def log_exception(sender_app: Flask, exception, **extra):
exception -- the exception to be logged
**extra -- any additional arguments
"""
- # TODO: RETURN ERROR IN JSON FORMAT
sender_app.logger.exception(f"ERROR for {flask.request.url}")
diff --git a/mc_backend/mcserver/app/api/exerciseAPI.py b/mc_backend/mcserver/app/api/exerciseAPI.py
index 5675c18..21d15fe 100644
--- a/mc_backend/mcserver/app/api/exerciseAPI.py
+++ b/mc_backend/mcserver/app/api/exerciseAPI.py
@@ -8,7 +8,7 @@ from connexion.lifecycle import ConnexionResponse
from flask import Response
from mcserver.app import db
from mcserver.app.models import ExerciseType, Solution, ExerciseData, AnnisResponse, Phenomenon, TextComplexity, \
- TextComplexityMeasure, ResourceType, ExerciseMC
+ TextComplexityMeasure, ResourceType, ExerciseMC, GraphData
from mcserver.app.services import AnnotationService, CorpusService, NetworkService, TextComplexityService
from mcserver.config import Config
from mcserver.models_auto import Exercise, TExercise, UpdateInfo
@@ -31,7 +31,7 @@ def get(eid: str) -> Union[Response, ConnexionResponse]:
if exercise is None:
return connexion.problem(404, Config.ERROR_TITLE_NOT_FOUND, Config.ERROR_MESSAGE_EXERCISE_NOT_FOUND)
ar: AnnisResponse = CorpusService.get_corpus(cts_urn=exercise.urn, is_csm=False)
- if not ar.nodes:
+ if not ar.graph_data.nodes:
return connexion.problem(404, Config.ERROR_TITLE_NOT_FOUND, Config.ERROR_MESSAGE_CORPUS_NOT_FOUND)
exercise.last_access_time = datetime.utcnow().timestamp()
db.session.commit()
@@ -40,7 +40,7 @@ def get(eid: str) -> Union[Response, ConnexionResponse]:
ar.uri = NetworkService.get_exercise_uri(exercise)
ar.exercise_id = exercise.eid
ar.exercise_type = exercise_type.value
- return NetworkService.make_json_response(ar.__dict__)
+ return NetworkService.make_json_response(ar.to_dict())
def get_graph_data(title: str, conll_string_or_urn: str, aqls: List[str], exercise_type: ExerciseType,
@@ -79,7 +79,7 @@ def make_new_exercise(conll: str, correct_feedback: str, exercise_type: str, gen
# create a response
return AnnisResponse(
solutions=json.loads(new_exercise.solutions), uri=f"{Config.SERVER_URI_FILE}/{new_exercise.eid}",
- exercise_id=xml_guid)
+ exercise_id=xml_guid, graph_data=GraphData(links=[], nodes=[]))
def map_exercise_data_to_database(exercise_data: ExerciseData, exercise_type: str, instructions: str, xml_guid: str,
@@ -93,7 +93,7 @@ def map_exercise_data_to_database(exercise_data: ExerciseData, exercise_type: st
# add content to solutions
solutions: List[Solution] = adjust_solutions(exercise_data=exercise_data, solutions=solutions,
exercise_type=exercise_type)
- quiz_solutions: str = json.dumps([x.serialize() for x in solutions])
+ quiz_solutions: str = json.dumps([x.to_dict() for x in solutions])
tc: TextComplexity = TextComplexityService.text_complexity(TextComplexityMeasure.all.name, urn, False,
exercise_data.graph)
new_exercise: Exercise = ExerciseMC.from_dict(
@@ -130,7 +130,7 @@ def post(exercise_data: dict) -> Union[Response, ConnexionResponse]:
return connexion.problem(500, Config.ERROR_TITLE_INTERNAL_SERVER_ERROR,
Config.ERROR_MESSAGE_INTERNAL_SERVER_ERROR)
solutions_dict_list: List[Dict] = response["solutions"]
- solutions: List[Solution] = [Solution(json_dict=x) for x in solutions_dict_list]
+ solutions: List[Solution] = [Solution.from_dict(x) for x in solutions_dict_list]
ar: AnnisResponse = make_new_exercise(
conll=response["conll"], correct_feedback=exercise_data.get("correct_feedback", ""),
exercise_type=exercise_data["type"], general_feedback=exercise_data.get("general_feedback", ""),
@@ -140,4 +140,4 @@ def post(exercise_data: dict) -> Union[Response, ConnexionResponse]:
search_values=exercise_data["search_values"], solutions=solutions,
type_translation=exercise_data.get("type_translation", ""), urn=urn,
work_author=exercise_data.get("work_author", ""), work_title=exercise_data.get("work_title", ""))
- return NetworkService.make_json_response(ar.__dict__)
+ return NetworkService.make_json_response(ar.to_dict())
diff --git a/mc_backend/mcserver/app/api/exerciseListAPI.py b/mc_backend/mcserver/app/api/exerciseListAPI.py
index 36a91b4..bb7a788 100644
--- a/mc_backend/mcserver/app/api/exerciseListAPI.py
+++ b/mc_backend/mcserver/app/api/exerciseListAPI.py
@@ -1,5 +1,4 @@
"""The corpus list API. Add it to your REST API to provide users with a list of metadata for available texts."""
-from datetime import datetime
from typing import List, Set
import conllu
diff --git a/mc_backend/mcserver/app/api/rawTextAPI.py b/mc_backend/mcserver/app/api/rawTextAPI.py
index 5ebf6f1..51890f1 100644
--- a/mc_backend/mcserver/app/api/rawTextAPI.py
+++ b/mc_backend/mcserver/app/api/rawTextAPI.py
@@ -18,8 +18,8 @@ class RawTextAPI(Resource):
args = self.reqparse.parse_args()
urn: str = args["urn"]
ar: AnnisResponse = CorpusService.get_corpus(cts_urn=urn, is_csm=False)
- if not ar.nodes:
+ if not ar.graph_data.nodes:
abort(404)
- gd: GraphData = GraphData(json_dict=ar.__dict__)
- ar.text_complexity = TextComplexityService.text_complexity(TextComplexityMeasure.all.name, urn, False, gd)
- return NetworkService.make_json_response(ar.__dict__)
+ ar.text_complexity = TextComplexityService.text_complexity(TextComplexityMeasure.all.name, urn, False,
+ ar.graph_data).to_dict()
+ return NetworkService.make_json_response(ar.to_dict())
diff --git a/mc_backend/mcserver/app/api/textcomplexityAPI.py b/mc_backend/mcserver/app/api/textcomplexityAPI.py
index e0ee427..7e38db5 100644
--- a/mc_backend/mcserver/app/api/textcomplexityAPI.py
+++ b/mc_backend/mcserver/app/api/textcomplexityAPI.py
@@ -19,6 +19,5 @@ class TextComplexityAPI(Resource):
urn: str = args["urn"]
measure: str = args["measure"]
ar: AnnisResponse = CorpusService.get_corpus(urn, is_csm=False)
- gd: GraphData = GraphData(json_dict=ar.__dict__)
- tc: TextComplexity = TextComplexityService.text_complexity(measure, urn, False, gd)
- return NetworkService.make_json_response(tc.__dict__)
+ tc: TextComplexity = TextComplexityService.text_complexity(measure, urn, False, ar.graph_data)
+ return NetworkService.make_json_response(tc.to_dict())
diff --git a/mc_backend/mcserver/app/api/vocabularyAPI.py b/mc_backend/mcserver/app/api/vocabularyAPI.py
index 6aa076a..dd29667 100644
--- a/mc_backend/mcserver/app/api/vocabularyAPI.py
+++ b/mc_backend/mcserver/app/api/vocabularyAPI.py
@@ -33,17 +33,17 @@ class VocabularyAPI(Resource):
for char in string.punctuation:
vocabulary_set.add(char)
ar: AnnisResponse = CorpusService.get_corpus(cts_urn=urn, is_csm=False)
- graph_data: GraphData = GraphData(json_dict=ar.__dict__)
if show_oov:
# this is not a request for sentence ranges, so we can take a shortcut
- for node in graph_data.nodes:
+ for node in ar.graph_data.nodes:
if not is_match(target_lemma=node.udep_lemma, vocabulary_set=vocabulary_set):
node.is_oov = True
- ar: AnnisResponse = AnnisResponse(solutions=[], uri="", exercise_id="", graph_data=graph_data)
- gd: GraphData = GraphData(json_dict=ar.__dict__)
- ar.text_complexity = TextComplexityService.text_complexity(TextComplexityMeasure.all.name, urn, False, gd)
- return NetworkService.make_json_response(ar.__dict__)
- sentences: List[Sentence] = check_vocabulary(graph_data, vocabulary_set)
+ ar: AnnisResponse = AnnisResponse(
+ solutions=[], uri="", exercise_id="", graph_data=ar.graph_data)
+ ar.text_complexity = TextComplexityService.text_complexity(TextComplexityMeasure.all.name, urn, False,
+ ar.graph_data).to_dict()
+ return NetworkService.make_json_response(ar.to_dict())
+ sentences: List[Sentence] = check_vocabulary(ar.graph_data, vocabulary_set)
return NetworkService.make_json_response([x.__dict__ for x in sentences])
diff --git a/mc_backend/mcserver/app/models.py b/mc_backend/mcserver/app/models.py
index 9259766..2904e7a 100644
--- a/mc_backend/mcserver/app/models.py
+++ b/mc_backend/mcserver/app/models.py
@@ -2,9 +2,25 @@
from typing import Dict, List, Union, Any
from enum import Enum
import typing
-from sqlalchemy.orm.state import InstanceState
from mcserver.config import Config
from mcserver.models_auto import TExercise, Corpus, TCorpus, Exercise, TLearningResult, LearningResult
+from openapi.openapi_server.models import SolutionElement, Solution, Link, Node, TextComplexity, AnnisResponse, \
+ GraphData
+
+AnnisResponse = AnnisResponse
+GraphData = GraphData
+LinkMC = Link
+NodeMC = Node
+SolutionElement = SolutionElement
+TextComplexity = TextComplexity
+
+
+def make_solution_element_from_salt_id(salt_id: str) -> SolutionElement:
+ """Extracts necessary information from a SALT ID string to create a solution element."""
+ salt_parts: List[str] = salt_id.split("#")[-1].split("tok")
+ sentence_id = int(salt_parts[0].replace("sent", ""))
+ token_id = int(salt_parts[1].replace("tok", ""))
+ return SolutionElement(content="", salt_id=salt_id, sentence_id=sentence_id, token_id=token_id)
class Case(Enum):
@@ -369,142 +385,6 @@ class XapiStatement:
context=self.context.serialize(), result=self.result.serialize())
-class LinkMC:
- annis_component_name: str
- annis_component_type: str
- source: str
- target: str
- udep_deprel: str
-
- def __init__(self, annis_component_name: str = "", annis_component_type: str = "", source: str = "",
- target: str = "", udep_deprel: str = None, json_dict: dict = None):
- if json_dict:
- self.__dict__ = json_dict
- else:
- self.annis_component_name = annis_component_name
- self.annis_component_type = annis_component_type
- self.source = source
- self.target = target
- if udep_deprel is not None:
- self.udep_deprel = udep_deprel
-
- def __eq__(self, other):
- if isinstance(other, LinkMC):
- for key in other.__dict__:
- if not isinstance(other.__dict__[key], InstanceState) and other.__dict__[key] != self.__dict__[key]:
- return False
- return True
- else:
- return False
-
-
-class NodeMC:
- annis_node_name: str
- annis_node_type: str
- annis_tok: str
- annis_type: str
- id: str
- is_oov: bool
- udep_lemma: str
- udep_upostag: str
- udep_xpostag: str
- udep_feats: str
- solution: str
-
- def __init__(self, annis_node_name: str = "", annis_node_type: str = "", annis_tok: str = "", annis_type: str = "",
- node_id: str = "", udep_upostag: str = "", udep_xpostag: str = "", udep_feats: str = "",
- solution: str = "", udep_lemma: str = None, is_oov: bool = None, json_dict: dict = None):
- if json_dict:
- self.__dict__ = json_dict
- else:
- self.annis_node_name = annis_node_name
- self.annis_node_type = annis_node_type
- self.annis_tok = annis_tok
- self.annis_type = annis_type
- self.id = node_id
- if udep_lemma is not None:
- self.udep_lemma = udep_lemma
- self.udep_upostag = udep_upostag
- self.udep_xpostag = udep_xpostag
- self.udep_feats = udep_feats
- self.solution = solution
- self.is_oov = is_oov
-
- def __eq__(self, other):
- if isinstance(other, NodeMC):
- return self.annis_node_name == other.annis_node_name and self.annis_node_type == other.annis_node_type and self.annis_tok == other.annis_tok and self.annis_type == other.annis_type and self.id == other.id and self.udep_lemma == other.udep_lemma and self.udep_upostag == other.udep_upostag and self.udep_xpostag == other.udep_xpostag and self.solution == other.solution
- else:
- return False
-
-
-class GraphData:
- directed: bool
- graph: Dict
- links: List[LinkMC]
- multigraph: bool
- nodes: List[NodeMC]
-
- def __init__(self, directed: bool = None, graph: Dict = None, links: List[LinkMC] = None, multigraph: bool = None,
- nodes: List[NodeMC] = None, json_dict: dict = None):
- if json_dict is None:
- self.directed = directed
- self.graph = graph
- self.links = links
- self.multigraph = multigraph
- self.nodes: List[NodeMC] = nodes
- else:
- self.directed = json_dict["directed"]
- self.graph = json_dict["graph"]
- self.multigraph = json_dict["multigraph"]
- self.links = [LinkMC(json_dict=x) for x in json_dict["links"]]
- self.nodes = [NodeMC(json_dict=x) for x in json_dict["nodes"]]
-
- def serialize(self) -> dict:
- ret_val: dict = self.__dict__.copy()
- ret_val["links"] = [x.__dict__ for x in self.links]
- ret_val["nodes"] = [x.__dict__ for x in self.nodes]
- return ret_val
-
-
-class SolutionElement:
- sentence_id: int
- token_id: int
- content: str
- salt_id: str
-
- def __init__(self, sentence_id: int = 0, token_id: int = 0, content: str = None, json_dict: Dict = None,
- salt_id: str = None):
- if json_dict:
- self.__dict__ = json_dict
- elif salt_id:
- salt_parts: List[str] = salt_id.split("#")[-1].split("tok")
- self.sentence_id = int(salt_parts[0].replace("sent", ""))
- self.token_id = int(salt_parts[1].replace("tok", ""))
- self.salt_id = salt_id
- self.content = content
- else:
- self.sentence_id = sentence_id
- self.token_id = token_id
- self.content = content
-
-
-class Solution:
- target: SolutionElement
- value: SolutionElement
-
- def __init__(self, target: SolutionElement = SolutionElement(), value: SolutionElement = SolutionElement(),
- json_dict: dict = None):
- if json_dict:
- self.target = SolutionElement(json_dict=json_dict["target"])
- self.value = SolutionElement(json_dict=json_dict["value"])
- else:
- self.target = target
- self.value = value
-
- def serialize(self) -> dict:
- return dict(target=self.target.__dict__, value=self.value.__dict__)
-
-
class ExerciseData:
"""Model for exercise data. Holds textual annotations as a graph"""
graph: GraphData
@@ -514,19 +394,19 @@ class ExerciseData:
def __init__(self, graph: GraphData = None, uri: str = None, solutions: List[Solution] = None,
json_dict: dict = None):
if json_dict is not None:
- self.graph = GraphData(json_dict=json_dict["graph"])
+ self.graph = GraphData.from_dict(json_dict["graph"])
self.uri = json_dict["uri"]
- self.solutions = [Solution(json_dict=solution_dict) for solution_dict in json_dict["solutions"]]
+ self.solutions = [Solution.from_dict(solution_dict) for solution_dict in json_dict["solutions"]]
else:
self.graph = graph
self.solutions = [] if solutions is None else solutions
self.uri = uri
def serialize(self) -> dict:
- ret_val: dict = {"solutions": [x.serialize() for x in self.solutions],
+ ret_val: dict = {"solutions": [x.to_dict() for x in self.solutions],
"graph": dict(multigraph=self.graph.multigraph, directed=self.graph.directed,
- graph=self.graph.graph, nodes=[x.__dict__ for x in self.graph.nodes],
- links=[x.__dict__ for x in self.graph.links]), "uri": self.uri}
+ graph=self.graph.graph, nodes=[x.to_dict() for x in self.graph.nodes],
+ links=[x.to_dict() for x in self.graph.links]), "uri": self.uri}
return ret_val
@@ -618,53 +498,3 @@ class FrequencyAnalysis(List[FrequencyItem]):
def serialize(self) -> List[dict]:
return [x.serialize() for x in self]
-
-
-class AnnisResponse:
-
- def __init__(self, solutions: List[Solution] = None, uri: str = "", exercise_id: str = "",
- graph_data: GraphData = None, frequency_analysis: FrequencyAnalysis = None,
- text_complexity: dict = None, exercise_type: ExerciseType = None,
- json_dict: dict = None):
- if json_dict is None:
- self.directed: bool = graph_data.directed if graph_data else False
- self.exercise_id: str = exercise_id
- self.exercise_type: str = exercise_type.value if exercise_type else ""
- self.frequency_analysis: List[dict] = [] if frequency_analysis is None else frequency_analysis.serialize()
- self.graph: dict = graph_data.graph if graph_data else {}
- self.links: List[dict] = [x.__dict__ for x in graph_data.links] if graph_data else []
- self.multigraph: bool = graph_data.multigraph if graph_data else False
- self.nodes: List[dict] = [x.__dict__ for x in graph_data.nodes] if graph_data else []
- self.solutions: List[Solution] = solutions
- self.text_complexity: dict = text_complexity if text_complexity else {}
- self.uri: str = uri
- else:
- self.__dict__ = json_dict
-
-
-class TextComplexity(dict):
- def __init__(self, n_w: int = 0, pos: int = 0, n_sent: int = 0, avg_w_per_sent: float = 0, avg_w_len: float = 0,
- n_punct: int = 0, n_types: int = 0, lex_den: float = 0, n_clause: int = 0, n_subclause: int = 0,
- n_abl_abs: int = 0, n_gerund: int = 0, n_inf: int = 0, n_part: int = 0, all: float = 0,
- json_dict: dict = None):
- super(TextComplexity).__init__()
- if json_dict is None:
- self.n_w: int = n_w
- self.pos: int = pos
- self.n_sent: int = n_sent
- self.avg_w_per_sent: float = avg_w_per_sent
- self.avg_w_len: float = avg_w_len
- self.n_punct: int = n_punct
- self.n_types: int = n_types
- self.lex_den: float = lex_den
- self.n_clause: int = n_clause
- self.n_subclause: int = n_subclause
- self.n_abl_abs: int = n_abl_abs
- self.n_gerund: int = n_gerund
- self.n_inf: int = n_inf
- self.n_part: int = n_part
- self.all: float = all
- else:
- self.update(json_dict)
- for key in json_dict:
- self.__setattr__(key, json_dict[key])
diff --git a/mc_backend/mcserver/app/services/annotationService.py b/mc_backend/mcserver/app/services/annotationService.py
index e510422..34ad920 100644
--- a/mc_backend/mcserver/app/services/annotationService.py
+++ b/mc_backend/mcserver/app/services/annotationService.py
@@ -252,7 +252,7 @@ class AnnotationService:
""" Maps a node dictionary to the native NodeMC class. """
return NodeMC(annis_node_name=node["annis::node_name"], annis_node_type=node["annis::node_type"],
annis_tok=node.get("annis::tok", None), annis_type=node.get("annis::type", None),
- node_id=str(node.get("id", "")), udep_lemma=node.get("udep::lemma", None),
+ id=str(node.get("id", "")), udep_lemma=node.get("udep::lemma", None),
udep_upostag=node.get("udep::upostag", None), udep_xpostag=node.get("udep::xpostag", None),
udep_feats=node.get("udep::feats", None))
diff --git a/mc_backend/mcserver/app/services/corpusService.py b/mc_backend/mcserver/app/services/corpusService.py
index 4bd1833..c54384c 100644
--- a/mc_backend/mcserver/app/services/corpusService.py
+++ b/mc_backend/mcserver/app/services/corpusService.py
@@ -13,10 +13,9 @@ from lxml import etree
from networkx import graph, MultiDiGraph
from networkx.readwrite import json_graph
from requests import HTTPError
-
from mcserver.app import db
from mcserver.app.models import CitationLevel, GraphData, Solution, ExerciseType, Phenomenon, FrequencyAnalysis, \
- AnnisResponse, SolutionElement, CorpusMC
+ AnnisResponse, CorpusMC, make_solution_element_from_salt_id
from mcserver.app.services import AnnotationService, XMLservice, TextService, FileService, FrequencyService, \
CustomCorpusService
from mcserver.config import Config
@@ -97,7 +96,7 @@ class CorpusService:
# get graph data for further processing
graph_data_raw: dict = CorpusService.get_graph_data(cts_urn)
if not graph_data_raw:
- return AnnisResponse()
+ return AnnisResponse(graph_data=GraphData(links=[], nodes=[]))
graph_data: GraphData = AnnotationService.map_graph_data(graph_data_raw)
ar: AnnisResponse = AnnisResponse(solutions=[], uri="", exercise_id="", graph_data=graph_data)
return ar
@@ -105,14 +104,13 @@ class CorpusService:
# there is actually no text, only a URN, so we need to get it ourselves
url: str = f"{Config.INTERNET_PROTOCOL}{Config.HOST_IP_CSM}:{Config.CORPUS_STORAGE_MANAGER_PORT}/"
response: requests.Response = requests.get(url, params=dict(urn=cts_urn))
- return AnnisResponse(json_dict=json.loads(response.text))
+ return AnnisResponse(graph_data=GraphData.from_dict(json.loads(response.text)))
@staticmethod
def get_frequency_analysis(urn: str, is_csm: bool) -> FrequencyAnalysis:
""" Collects frequency statistics for various combinations of linguistic annotations in a corpus. """
if is_csm:
ar: AnnisResponse = CorpusService.get_corpus(urn, is_csm)
- gd: GraphData = GraphData(json_dict=ar.__dict__)
search_phenomena: List[List[Phenomenon]] = []
for head_phenomenon in Phenomenon:
for base_phenomenon in Phenomenon:
@@ -126,7 +124,7 @@ class CorpusService:
fa += FrequencyService.add_case_frequencies(disk_urn, search_phenomenon)
elif search_phenomenon[0] in [Phenomenon.lemma, Phenomenon.partOfSpeech]:
fa += FrequencyService.add_generic_frequencies(disk_urn, search_phenomenon)
- FrequencyService.add_dependency_frequencies(gd, fa)
+ FrequencyService.add_dependency_frequencies(ar.graph_data, fa)
return FrequencyService.extract_case_values(fa)
else:
url: str = Config.INTERNET_PROTOCOL + f"{Config.HOST_IP_CSM}:{Config.CORPUS_STORAGE_MANAGER_PORT}" + \
@@ -195,21 +193,23 @@ class CorpusService:
# it's cloze or markWords; the solutions only have a target, no explicit value
if search_phenomena[0] == Phenomenon.dependency:
node_ids = [node_ids[i] for i in range(len(node_ids)) if i % 2 != 0]
- matches += [Solution(target=SolutionElement(salt_id=x)) for x in node_ids]
+ matches += [Solution(target=make_solution_element_from_salt_id(x)) for x in node_ids]
else:
- matches += [Solution(target=SolutionElement(salt_id=x)) for x in node_ids]
+ matches += [Solution(target=make_solution_element_from_salt_id(x)) for x in node_ids]
else:
# it's a matching exercise
if search_phenomena[0] == Phenomenon.dependency:
for i in range(len(node_ids)):
if i % 3 == 0:
- matches.append(Solution(target=SolutionElement(salt_id=node_ids[i + 1]),
- value=SolutionElement(salt_id=node_ids[i + 2])))
+ matches.append(Solution(
+ target=make_solution_element_from_salt_id(node_ids[i + 1]),
+ value=make_solution_element_from_salt_id(node_ids[i + 2])))
else:
for i in range(len(node_ids)):
if i % 2 == 0:
- matches.append(Solution(target=SolutionElement(salt_id=node_ids[i]),
- value=SolutionElement(salt_id=node_ids[i + 1])))
+ matches.append(
+ Solution(target=make_solution_element_from_salt_id(node_ids[i]),
+ value=make_solution_element_from_salt_id(node_ids[i + 1])))
from operator import attrgetter
matches.sort(key=attrgetter("target.sentence_id", "target.token_id"))
return matches
@@ -218,8 +218,7 @@ class CorpusService:
def get_raw_text(urn: str, is_csm: bool):
""" Retrieves the raw text for a corpus. """
ar: AnnisResponse = CorpusService.get_corpus(cts_urn=urn, is_csm=is_csm)
- graph_data: GraphData = GraphData(json_dict=ar.__dict__)
- text_raw = " ".join(x.annis_tok for x in graph_data.nodes)
+ text_raw = " ".join(x.annis_tok for x in ar.graph_data.nodes)
# remove the spaces before punctuation because, otherwise, the parser won't work correctly
return TextService.strip_whitespace(text_raw)
@@ -279,7 +278,7 @@ class CorpusService:
Config.SERVER_URI_CSM_SUBGRAPH
response: requests.Response = requests.get(url, params=dict(urn=disk_urn, aqls=aql,
ctx_left=ctx_left, ctx_right=ctx_right))
- return AnnisResponse(json_dict=json.loads(response.text))
+ return AnnisResponse.from_dict(json.loads(response.text))
@staticmethod
def init_graphannis_logging() -> None:
@@ -332,7 +331,7 @@ class CorpusService:
if "newpar" in x.metadata and not x.metadata["newpar"]:
del x.metadata["newpar"]
text_conll += x.serialize()
- return dict(graph_data_raw=graph_data_raw, solutions=[x.serialize() for x in solutions], conll=text_conll)
+ return dict(graph_data_raw=graph_data_raw, solutions=[x.to_dict() for x in solutions], conll=text_conll)
@staticmethod
def update_corpora():
diff --git a/mc_backend/mcserver/app/services/databaseService.py b/mc_backend/mcserver/app/services/databaseService.py
index d18b2a6..6d5d6eb 100644
--- a/mc_backend/mcserver/app/services/databaseService.py
+++ b/mc_backend/mcserver/app/services/databaseService.py
@@ -1,5 +1,5 @@
from datetime import datetime
-from typing import List, Dict, Any
+from typing import List, Dict
from flask import Flask
from flask_migrate import stamp, upgrade
@@ -7,8 +7,8 @@ import rapidjson as json
from sqlalchemy.exc import OperationalError
from mcserver.app import db
-from mcserver.app.models import CitationLevel, ResourceType, TextComplexityMeasure, \
- AnnisResponse, GraphData, TextComplexity
+from mcserver.app.models import CitationLevel, ResourceType, TextComplexityMeasure, AnnisResponse, GraphData, \
+ TextComplexity
from mcserver.app.services import CorpusService, CustomCorpusService, TextComplexityService
from mcserver.config import Config
from mcserver.models_auto import Corpus, Exercise, UpdateInfo
@@ -19,14 +19,15 @@ class DatabaseService:
@staticmethod
def check_corpus_list_age(app: Flask) -> None:
""" Checks whether the corpus list needs to be updated. If yes, it performs the update. """
+ app.logger.info("Corpus update started.")
ui_cts: UpdateInfo = db.session.query(UpdateInfo).filter_by(resource_type=ResourceType.cts_data.name).first()
db.session.commit()
if ui_cts is None:
+ app.logger.info("UpdateInfo not available!")
return
else:
ui_datetime: datetime = datetime.fromtimestamp(ui_cts.last_modified_time)
if (datetime.utcnow() - ui_datetime).total_seconds() > Config.INTERVAL_CORPUS_UPDATE:
- app.logger.info("Corpus update started.")
CorpusService.update_corpora()
ui_cts.last_modified_time = datetime.utcnow().timestamp()
db.session.commit()
@@ -34,7 +35,7 @@ class DatabaseService:
@staticmethod
def init_db_alembic() -> None:
- """ In Docker, the alembic version is not initially written to the database, so we need to set it manually. """
+ """In Docker, the alembic version is not initially written to the database, so we need to set it manually."""
if not db.engine.dialect.has_table(db.engine, Config.DATABASE_TABLE_ALEMBIC):
stamp(directory=Config.MIGRATIONS_DIRECTORY)
upgrade(directory=Config.MIGRATIONS_DIRECTORY)
@@ -107,8 +108,7 @@ class DatabaseService:
# manually add text complexity measures for old exercises
elif not exercise.text_complexity:
ar: AnnisResponse = CorpusService.get_corpus(exercise.urn, is_csm=is_csm)
- gd = GraphData(json_dict=ar.__dict__)
tc: TextComplexity = TextComplexityService.text_complexity(TextComplexityMeasure.all.name,
- exercise.urn, is_csm, gd)
+ exercise.urn, is_csm, ar.graph_data)
exercise.text_complexity = tc.all
db.session.commit()
diff --git a/mc_backend/mcserver/app/services/fileService.py b/mc_backend/mcserver/app/services/fileService.py
index e4c7b9e..9a70ac0 100644
--- a/mc_backend/mcserver/app/services/fileService.py
+++ b/mc_backend/mcserver/app/services/fileService.py
@@ -11,10 +11,8 @@ from docx import Document
from docx.text.paragraph import Paragraph
from docx.text.run import Run
from xhtml2pdf import pisa
-
from mcserver import Config
-from mcserver.app.models import DownloadableFile, FileType, Solution, ExerciseType, SolutionElement, \
- VocabularyCorpus
+from mcserver.app.models import DownloadableFile, FileType, Solution, ExerciseType, VocabularyCorpus, SolutionElement
from mcserver.app.services import TextService, XMLservice
from mcserver.models_auto import Exercise
@@ -118,7 +116,7 @@ class FileService:
"""Creates a temporary file for the exercise data, so the users can download it."""
existing_file: DownloadableFile = FileService.create_tmp_file(file_type, exercise.eid)
conll: List[TokenList] = conllu.parse(exercise.conll)
- solutions: List[Solution] = [Solution(json_dict=x) for x in json.loads(exercise.solutions)]
+ solutions: List[Solution] = [Solution.from_dict(x) for x in json.loads(exercise.solutions)]
if solution_indices is not None:
solutions = [solutions[x] for x in solution_indices]
# write the relevant content to the file
diff --git a/mc_backend/mcserver/app/services/frequencyService.py b/mc_backend/mcserver/app/services/frequencyService.py
index 6e3635a..4ffbee6 100644
--- a/mc_backend/mcserver/app/services/frequencyService.py
+++ b/mc_backend/mcserver/app/services/frequencyService.py
@@ -50,7 +50,7 @@ class FrequencyService:
values_to_fi_dict: Dict[str, FrequencyItem] = {}
for link in dep_links:
base_node: NodeMC = graph_data.nodes[id_to_node_dict[link.source]]
- if "udep_deprel" not in link.__dict__:
+ if not link.udep_deprel:
continue
dep: Dependency = dep_to_enum_dict[link.udep_deprel]
FrequencyService.add_frequency_item(base_node, dep, values_to_fi_dict, 1)
diff --git a/mc_backend/mcserver/app/services/textComplexityService.py b/mc_backend/mcserver/app/services/textComplexityService.py
index de3a2cb..12cca55 100644
--- a/mc_backend/mcserver/app/services/textComplexityService.py
+++ b/mc_backend/mcserver/app/services/textComplexityService.py
@@ -184,5 +184,5 @@ class TextComplexityService:
f"{Config.CORPUS_STORAGE_MANAGER_PORT}{Config.SERVER_URI_TEXT_COMPLEXITY}"
ar: AnnisResponse = AnnisResponse(graph_data=gd)
response: requests.Response = requests.post(url, data=json.dumps(
- dict(urn=urn, measure=TextComplexityMeasure.all.name, annis_response=ar.__dict__)))
- return TextComplexity(json_dict=json.loads(response.text))
+ dict(urn=urn, measure=TextComplexityMeasure.all.name, annis_response=ar.to_dict())))
+ return TextComplexity.from_dict(json.loads(response.text))
diff --git a/mc_backend/mcserver/app/services/textService.py b/mc_backend/mcserver/app/services/textService.py
index 97cd513..4dba444 100644
--- a/mc_backend/mcserver/app/services/textService.py
+++ b/mc_backend/mcserver/app/services/textService.py
@@ -42,7 +42,7 @@ class TextService:
@staticmethod
def get_solutions_by_index(exercise: Exercise, solution_indices: List[int] = None) -> List[Solution]:
""" If available, makes use of the solution indices to return only the wanted solutions. """
- available_solutions: List[Solution] = [Solution(json_dict=x) for x in json.loads(exercise.solutions)]
+ available_solutions: List[Solution] = [Solution.from_dict(x) for x in json.loads(exercise.solutions)]
if solution_indices is None:
return available_solutions
return [available_solutions[i] for i in solution_indices] if len(solution_indices) > 0 else []
diff --git a/mc_backend/mcserver/config.py b/mc_backend/mcserver/config.py
index 96397ab..eabf6c9 100644
--- a/mc_backend/mcserver/config.py
+++ b/mc_backend/mcserver/config.py
@@ -33,7 +33,9 @@ class Config(object):
TREEBANKS_PATH = os.path.join(ASSETS_DIRECTORY, "treebanks")
TREEBANKS_PROIEL_PATH = os.path.join(TREEBANKS_PATH, "proiel")
- API_SPEC_FILE_PATH = os.path.join(MC_SERVER_DIRECTORY, "mcserver_api.yaml")
+ API_SPEC_JSON_FILE_NAME = "openapi.json"
+ API_SPEC_JSON_FILE_PATH = os.path.join(MC_SERVER_DIRECTORY, API_SPEC_JSON_FILE_NAME)
+ API_SPEC_YAML_FILE_PATH = os.path.join(MC_SERVER_DIRECTORY, "mcserver_api.yaml")
AQL_CASE = "/.*Case=.*/"
AQL_DEP = "->dep"
AQL_DEPREL = "deprel"
@@ -130,7 +132,8 @@ class Config(object):
SERVER_URI_VOCABULARY = SERVER_URI_BASE + "vocabulary"
# END endpoints
SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL") or DATABASE_URL_SQLITE
- SQLALCHEMY_ECHO = True
+ # BEWARE: if True, logs every single database statement executed by this application to STDOUT
+ SQLALCHEMY_ECHO = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
STATIC_EXERCISES_REPOSITORY_URL = "https://scm.cms.hu-berlin.de/callidus/machina-callida/-/archive/master/machina-callida-master.zip?path=mc_frontend%2Fsrc%2Fassets%2Fh5p"
STOP_WORDS_LATIN_PATH = os.path.join(CACHE_DIRECTORY, "stop_words_latin.json")
@@ -176,8 +179,6 @@ class TestingConfig(Config):
PRESERVE_CONTEXT_ON_EXCEPTION = False
SERVER_NAME = Config.HOST_IP_MCSERVER + ":{0}".format(Config.HOST_PORT)
SESSION_COOKIE_DOMAIN = False
- SIMULATE_CORPUS_NOT_FOUND = False
- SIMULATE_EMPTY_GRAPH = False
SIMULATE_HTTP_ERROR = False
SQLALCHEMY_DATABASE_URI = Config.DATABASE_URL_SQLITE
STATIC_EXERCISES_ZIP_FILE_PATH = os.path.join(Config.TMP_DIRECTORY, "static_exercises.zip")
diff --git a/mc_backend/mcserver/mcserver_api.yaml b/mc_backend/mcserver/mcserver_api.yaml
index 6179a69..1a33924 100644
--- a/mc_backend/mcserver/mcserver_api.yaml
+++ b/mc_backend/mcserver/mcserver_api.yaml
@@ -94,8 +94,11 @@ paths:
operationId: mcserver.app.api.exerciseAPI.get
responses:
200:
- description: Exercise data object
- # TODO: SPECIFY RESPONSE SCHEMA
+ description: Exercise data object, including a graph model for linguistic annotations.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/AnnisResponse'
parameters:
- name: eid
in: query
@@ -110,7 +113,10 @@ paths:
responses:
200:
description: Exercise data object
- # TODO: SPECIFY RESPONSE SCHEMA
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/AnnisResponse'
requestBody:
$ref: '#/components/requestBodies/ExerciseForm'
components:
@@ -145,6 +151,53 @@ components:
required:
- type
schemas:
+ AnnisResponse:
+ description: A response with graph data from ANNIS, possibly with additional data for exercises.
+ type: object
+ properties:
+ exercise_id:
+ type: string
+ description: Unique identifier (UUID) for the exercise.
+ example: 12345678-1234-5678-1234-567812345678
+ exercise_type:
+ type: string
+ description: Type of exercise, concerning interaction and layout.
+ example: ddwtos
+ frequency_analysis:
+ type: array
+ description: List of items with frequency data for linguistic phenomena.
+ items:
+ type: object
+ properties:
+ count:
+ type: integer
+ description: How often the given combination of values occurred.
+ example: 1
+ phenomena:
+ type: array
+ description: Labels for the phenomena described in this frequency entry.
+ example: []
+ items:
+ type: string
+ values:
+ type: array
+ description: Values for the phenomena described in this frequency entry.
+ example: []
+ items:
+ type: string
+ graph_data:
+ $ref: "#/components/schemas/GraphData"
+ solutions:
+ type: array
+ description: Correct solutions for this exercise.
+ items:
+ $ref: '#/components/schemas/Solution'
+ text_complexity:
+ $ref: '#/components/schemas/TextComplexity'
+ uri:
+ type: string
+ description: URI for accessing the exercise in this API.
+ example: /mc/api/v1.0/file/fd97630c-1f5a-4102-af56-20eb0babdfee
Corpus: # Object definition
description: Collection of texts.
type: object # Data type
@@ -251,6 +304,79 @@ components:
required:
- eid
- last_access_time
+ ExerciseBase:
+ description: Base data for creating and evaluating interactive exercises.
+ type: object
+ properties:
+ correct_feedback:
+ type: string
+ description: Feedback for successful completion of the exercise.
+ example: Well done!
+ default: ""
+ general_feedback:
+ type: string
+ description: Feedback for finishing the exercise.
+ example: You have finished the exercise.
+ default: ""
+ incorrect_feedback:
+ type: string
+ description: Feedback for failing to complete the exercise successfully.
+ example: Unfortunately, that answer is wrong.
+ default: ""
+ instructions:
+ type: string
+ description: Hints for how to complete the exercise.
+ example: Fill in the gaps!
+ default: ""
+ partially_correct_feedback:
+ type: string
+ description: Feedback for successfully completing certain parts of the exercise.
+ example: Some parts of this answer are correct.
+ default: ""
+ search_values:
+ type: string
+ description: Search queries that were used to build the exercise.
+ example: "['upostag=noun', 'dependency=object']"
+ default: "[]"
+ work_author:
+ type: string
+ description: Name of the person who wrote the base text for the exercise.
+ example: C. Iulius Caesar
+ default: ""
+ work_title:
+ type: string
+ description: Title of the base text for the exercise.
+ example: Noctes Atticae
+ default: ""
+ GraphData:
+ type: object
+ description: Nodes, edges and metadata for a graph.
+ properties:
+ directed:
+ type: boolean
+ description: Whether edges in the returned graph are directed.
+ example: true
+ graph:
+ type: object
+ description: Additional graph data.
+ example: {}
+ links:
+ type: array
+ description: List of edges for the graph.
+ items:
+ $ref: '#/components/schemas/Link'
+ multigraph:
+ type: boolean
+ description: Whether the graph consists of multiple subgraphs.
+ example: true
+ nodes:
+ type: array
+ description: List of nodes for the graph.
+ items:
+ $ref: '#/components/schemas/Node'
+ required:
+ - links
+ - nodes
LearningResult:
description: Learner data for completed exercises.
type: object
@@ -371,6 +497,191 @@ components:
- score_min
- score_raw
- success
+ Link:
+ type: object
+ properties:
+ annis_component_name:
+ type: string
+ description: Component name as given by ANNIS.
+ example: dep
+ annis_component_type:
+ type: string
+ description: Component type as given by ANNIS.
+ example: Pointing
+ source:
+ type: string
+ description: ID of the source node for the edge.
+ example: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ target:
+ type: string
+ description: ID of the target node for the edge.
+ example: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok3
+ udep_deprel:
+ type: string
+ description: Dependency relation described by the edge.
+ example: "det"
+ required:
+ - annis_component_name
+ - annis_component_type
+ - source
+ - target
+ Node:
+ type: object
+ properties:
+ annis_node_name:
+ type: string
+ description: Node name as given by ANNIS.
+ example: "urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1"
+ annis_node_type:
+ type: string
+ description: Node type as given by ANNIS.
+ example: "node"
+ annis_tok:
+ type: string
+ description: Raw word form as given by ANNIS.
+ example: "Galliae"
+ annis_type:
+ type: string
+ description: Node type as given by ANNIS (?).
+ example: "node"
+ id:
+ type: string
+ description: Unique identifier for the node in the SALT model.
+ example: "salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1"
+ is_oov:
+ type: boolean
+ description: Whether the raw word form is missing in a given vocabulary.
+ example: true
+ udep_lemma:
+ type: string
+ description: Lemmatized word form.
+ example: "Gallia"
+ udep_upostag:
+ type: string
+ description: Universal part of speech tag for the word form.
+ example: "PROPN"
+ udep_xpostag:
+ type: string
+ description: Language-specific part of speech tag for the word form.
+ example: "Ne"
+ udep_feats:
+ type: string
+ description: Additional morphological information.
+ example: "Case=Nom|Gender=Fem|Number=Sing"
+ solution:
+ type: string
+ description: Solution value for this node in an exercise.
+ example: ""
+ required:
+ - annis_node_name
+ - annis_node_type
+ - annis_tok
+ - annis_type
+ - id
+ - udep_lemma
+ - udep_upostag
+ Solution:
+ type: object
+ description: Correct solution for an exercise.
+ properties:
+ target:
+ $ref: '#/components/schemas/SolutionElement'
+ value:
+ $ref: '#/components/schemas/SolutionElement'
+ SolutionElement:
+ type: object
+ description: Target or value of a correct solution for an exercise.
+ properties:
+ content:
+ type: string
+ description: Content of the solution element.
+ example: unam
+ salt_id:
+ type: string
+ description: Unique identifier for the node in the SALT model.
+ example: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok9
+ sentence_id:
+ type: integer
+ description: Unique identifier for the sentence in a corpus.
+ example: 52548
+ token_id:
+ type: integer
+ description: Unique identifier for the token in a sentence.
+ example: 9
+ required:
+ - content
+ - sentence_id
+ - token_id
+ TextComplexity:
+ type: object
+ description: Mapping of various elements of text complexity to their corresponding values.
+ properties:
+ all:
+ type: number
+ format: float
+ description: Overall text complexity of the given corpus.
+ example: 42.31
+ avg_w_len:
+ type: number
+ format: float
+ description: Average length of a word in the given corpus.
+ example: 5.4
+ avg_w_per_sent:
+ type: number
+ format: float
+ description: Average number of words per sentence.
+ example: 5.4
+ lex_den:
+ type: number
+ format: float
+ minimum: 0
+ maximum: 1
+ description: Lexical density of the given corpus.
+ example: 0.43
+ n_abl_abs:
+ type: integer
+ description: Number of ablativi absoluti in the given corpus.
+ example: 1
+ n_clause:
+ type: integer
+ description: Number of clauses in the given corpus.
+ example: 1
+ n_gerund:
+ type: integer
+ description: Number of gerunds in the given corpus.
+ example: 1
+ n_inf:
+ type: integer
+ description: Number of infinitives in the given corpus.
+ example: 1
+ n_part:
+ type: integer
+ description: Number of participles in the given corpus.
+ example: 1
+ n_punct:
+ type: integer
+ description: Number of punctuation signs in the given corpus.
+ example: 1
+ n_sent:
+ type: integer
+ description: Number of sentences in the given corpus.
+ example: 1
+ n_subclause:
+ type: integer
+ description: Number of subclauses in the given corpus.
+ example: 1
+ n_types:
+ type: integer
+ description: Number of distinct word forms in the given corpus.
+ example: 1
+ n_w:
+ type: integer
+ description: Number of words in the given corpus.
+ example: 1
+ pos:
+ type: integer
+ description: Number of distinct part of speech tags in the given corpus.
+ example: 1
UpdateInfo:
description: Timestamps for updates of various resources.
type: object
@@ -398,47 +709,3 @@ components:
- created_time
- last_modified_time
- resource_type
- ExerciseBase:
- description: Base data for creating and evaluating interactive exercises.
- type: object
- properties:
- correct_feedback:
- type: string
- description: Feedback for successful completion of the exercise.
- example: Well done!
- default: ""
- general_feedback:
- type: string
- description: Feedback for finishing the exercise.
- example: You have finished the exercise.
- default: ""
- incorrect_feedback:
- type: string
- description: Feedback for failing to complete the exercise successfully.
- example: Unfortunately, that answer is wrong.
- default: ""
- instructions:
- type: string
- description: Hints for how to complete the exercise.
- example: Fill in the gaps!
- default: ""
- partially_correct_feedback:
- type: string
- description: Feedback for successfully completing certain parts of the exercise.
- example: Some parts of this answer are correct.
- default: ""
- search_values:
- type: string
- description: Search queries that were used to build the exercise.
- example: "['upostag=noun', 'dependency=object']"
- default: "[]"
- work_author:
- type: string
- description: Name of the person who wrote the base text for the exercise.
- example: C. Iulius Caesar
- default: ""
- work_title:
- type: string
- description: Title of the base text for the exercise.
- example: Noctes Atticae
- default: ""
diff --git a/mc_backend/mocks.py b/mc_backend/mocks.py
index ab883b0..e14d2aa 100644
--- a/mc_backend/mocks.py
+++ b/mc_backend/mocks.py
@@ -1,5 +1,7 @@
import json
import logging
+import os
+import shutil
from collections import OrderedDict
from datetime import datetime
from typing import List, Tuple, Dict
@@ -17,8 +19,9 @@ from sqlalchemy.exc import OperationalError
from mcserver import Config, TestingConfig
from mcserver.app import db, shutdown_session
-from mcserver.app.models import Phenomenon, PartOfSpeech, CitationLevel, SolutionElement, ExerciseData, GraphData, \
- LinkMC, NodeMC, Language, Dependency, Case, AnnisResponse, Solution, TextPart, Citation, ExerciseMC, CorpusMC
+from mcserver.app.models import Phenomenon, PartOfSpeech, CitationLevel, ExerciseData, GraphData, \
+ LinkMC, NodeMC, Language, Dependency, Case, AnnisResponse, Solution, TextPart, Citation, ExerciseMC, CorpusMC, \
+ SolutionElement
from mcserver.app.services import AnnotationService, CustomCorpusService, TextService
from mcserver.models_auto import Corpus, Exercise, UpdateInfo
@@ -86,6 +89,8 @@ class TestHelper:
if len(Mocks.app_dict) and list(Mocks.app_dict.keys())[0] != class_name:
if Config.CORPUS_STORAGE_MANAGER:
Config.CORPUS_STORAGE_MANAGER.__exit__(None, None, None)
+ if os.path.exists(Config.GRAPH_DATABASE_DIR):
+ shutil.rmtree(Config.GRAPH_DATABASE_DIR)
list(Mocks.app_dict.values())[0].app_context.pop()
shutdown_session()
db.drop_all()
@@ -649,19 +654,19 @@ class Mocks:
"solutions": [{"target": {"sentence_id": 159692, "token_id": 7,
"salt_id": "salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent159692tok7",
"content": "praecepturus"},
- "value": {"sentence_id": 0, "token_id": 0, "content": None,
+ "value": {"sentence_id": 0, "token_id": 0, "content": "",
"salt_id": "salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent159692tok1"}},
{
"target": {"sentence_id": 159692, "token_id": 9,
"salt_id": "salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent159692tok9",
"content": "aestimare"},
- "value": {"sentence_id": 0, "token_id": 0, "content": None,
+ "value": {"sentence_id": 0, "token_id": 0, "content": "",
"salt_id": "salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent159692tok1"}},
{
"target": {"sentence_id": 159693, "token_id": 5,
"salt_id": "salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent159693tok5",
"content": "debet"},
- "value": {"sentence_id": 0, "token_id": 0, "content": None,
+ "value": {"sentence_id": 0, "token_id": 0, "content": "",
"salt_id": "salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent159692tok1"}}],
"conll": "# newdoc id = /var/folders/30/yqnv6lz56r14dqhpw18knn2r0000gp/T/tmp7qn86au9\n# sent_id = 1\n# text = Caesar fortis est.\n1\tCaesar\tCaeso\tVERB\tC1|grn1|casA|gen1|stAN\tCase=Nom|Degree=Pos|Gender=Masc|Number=Sing\t2\tcsubj\t_\t_\n2\tfortis\tfortis\tADJ\tC1|grn1|casA|gen1|stAN\tCase=Nom|Degree=Pos|Gender=Masc|Number=Sing\t0\troot\troot\t_\n3\test\tsum\tAUX\tN3|modA|tem1|gen6|stAV\tMood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act\t2\tcop\t_\tSpaceAfter=No\n4\t.\t.\tPUNCT\tPunc\t_\t2\tpunct\t_\t_\n\n# sent_id = 2\n# text = Galli moriuntur.\n1\tGalli\tGallus\tPRON\tF1|grn1|casJ|gen1|stPD\tCase=Nom|Degree=Pos|Gender=Masc|Number=Plur|PronType=Dem\t2\tnsubj:pass\t_\t_\n2\tmoriuntur\tmorior\tVERB\tL3|modJ|tem1|gen9|stAV\tMood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin|Voice=Pass\t0\troot\troot\tSpaceAfter=No\n3\t.\t.\tPUNCT\tPunc\t_\t2\tpunct\t_\tSpacesAfter=\\n\n\n"}
app_dict: Dict[str, TestHelper] = {}
@@ -690,7 +695,7 @@ class Mocks:
salt_id="salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent1tok1"),
value=SolutionElement(
sentence_id=1, token_id=2, content="Caesar",
- salt_id="salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent1tok2")).serialize()
+ salt_id="salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent1tok2")).to_dict()
]).replace(" ", ""),
urn=f"{CustomCorpusService.custom_corpora[4].corpus.source_urn}:2.23.1-2.23.1")
exercise_data: ExerciseData = ExerciseData(
@@ -699,10 +704,10 @@ class Mocks:
source="doc1#sent1tok1", target="doc1#sent1tok2", udep_deprel="uddr")],
multigraph=False, nodes=[
NodeMC(annis_node_name="ann", annis_node_type="ant", annis_tok="atk", annis_type="atp",
- node_id="doc1#sent1tok1", udep_upostag="udupt", udep_xpostag="udxpt", udep_feats="udf",
+ id="doc1#sent1tok1", udep_upostag="udupt", udep_xpostag="udxpt", udep_feats="udf",
udep_lemma="udl"),
NodeMC(annis_node_name="ann", annis_node_type="ant", annis_tok="atk", annis_type="atp",
- node_id="doc1#sent1tok2", udep_upostag="udupt", udep_xpostag="udxpt", udep_feats="udf",
+ id="doc1#sent1tok2", udep_upostag="udupt", udep_xpostag="udxpt", udep_feats="udf",
udep_lemma="udl")]), uri="/test", solutions=[])
exercise_pdf: bytes = b'%PDF-1.4\n%\x93\x8c\x8b\x9e ReportLab Generated PDF document http://www.reportlab.com\n1 0 obj\n<<\n/F1 2 0 R\n>>\nendobj\n2 0 obj\n<<\n/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font\n>>\nendobj\n3 0 obj\n<<\n/BitsPerComponent 1 /ColorSpace /DeviceGray /Filter [ /ASCII85Decode ] /Height 23 /Length 223 /Subtype /Image \n /Type /XObject /Width 24\n>>\nstream\n\n 003B00 002700 002480 0E4940 114920 14B220 3CB650\n 75FE88 17FF8C 175F14 1C07E2 3803C4 703182 F8EDFC\n B2BBC2 BB6F84 31BFC2 18EA3C 0E3E00 07FC00 03F800\n 1E1800 1FF800>\n endstream\nendobj\n4 0 obj\n<<\n/Contents 8 0 R /MediaBox [ 0 0 595.2756 841.8898 ] /Parent 7 0 R /Resources <<\n/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] /XObject <<\n/FormXob.c7485dcc8d256a6f197ed7802687f252 3 0 R\n>>\n>> /Rotate 0 /Trans <<\n\n>> \n /Type /Page\n>>\nendobj\n5 0 obj\n<<\n/PageMode /UseNone /Pages 7 0 R /Type /Catalog\n>>\nendobj\n6 0 obj\n<<\n/Author () /CreationDate'
exercise_xml: str = '
]]> 1.0000000 0.1000000 0 1 '
@@ -756,9 +761,9 @@ class Mocks:
'Romanus', 'Solomon', 'amor']
raw_text: str = "Caesar fortis est. Galli moriuntur."
static_exercises_udpipe_string: str = "1\tscribere\tscribere\n1\tcommovere\tcommovere\n1\tC\tC\n1\tgaudere\tgaudere\n1\tsignum\tsignum\n1\tvas\tvas\n1\tclarus\tclarus\n1\tcondicio\tcondicio\n1\tcom\tcum\n1\tprae\tprae\n1\tmovere\tmovere\n1\tducere\tducere\n1\tde\tde\n1\tcum\tcum\n1\tistam\tiste\n1\tnationum\tnatio\n1\tclarissimae\tclarus\n1\tmoderationem\tmoderatio\n1\tanimi\tanimus\n1\tomnium\tomnis\n1\tgentium\tgens\n1\tac\tac\n1\tvirtutem\tvirtus\n1\tprovinciae\tprovincia\n1\tCaesar\tCaesar\n1\test\tesse\n1\tsatis\tsatis\n1\tgovernment\tgovernment\n1\tsocius\tsocius\n1\tprovincia\tprovincia\n1\tpublicus\tpublicus\n1\tcivis\tcivis\n1\tatque\tatque"
- subgraph_json: str = '{"directed":true,"exercise_id":"","exercise_type":"","frequency_analysis":[],"graph":{},"links":[],"multigraph":true,"nodes":[{"annis_node_name":"urn:cts:latinLit:phi0448.phi001.perseus-lat2:1.1.1-1.1.1/doc1#sent1tok3","annis_node_type":"node","annis_tok":"Galli","annis_type":"node","id":"salt:/urn:cts:latinLit:phi0448.phi001.perseus-lat2:1.1.1-1.1.1/doc1#sent1tok3","udep_lemma":"Gallo","udep_upostag":"VERB","udep_xpostag":"L3|modQ|tem1|stAC","udep_feats":"Tense=Pres|VerbForm=Inf|Voice=Pass","solution":"","is_oov":null}],"solutions":[],"text_complexity":{},"uri":""}'
+ subgraph_json: str = '{"exercise_id":"","exercise_type":null,"frequency_analysis":null,"graph_data":{"directed":true,"graph":{},"links":[],"multigraph":true,"nodes":[{"annis_node_name":"urn:cts:latinLit:phi0448.phi001.perseus-lat2:1.1.1-1.1.1/doc1#sent1tok3","annis_node_type":"node","annis_tok":"Galli","annis_type":"node","id":"salt:/urn:cts:latinLit:phi0448.phi001.perseus-lat2:1.1.1-1.1.1/doc1#sent1tok3","is_oov":null,"udep_lemma":"Gallo","udep_upostag":"VERB","udep_xpostag":"L3|modQ|tem1|stAC","udep_feats":"Tense=Pres|VerbForm=Inf|Voice=Pass","solution":null}]},"solutions":[],"text_complexity":null,"uri":""}'
test_args: List[str] = ["tests.py", "-test"]
- text_complexity_json_string: str = '{"n_w":52,"pos":11,"n_sent":3,"avg_w_per_sent":17.33,"avg_w_len":5.79,"n_punct":3,"n_types":48,"lex_den":0.73,"n_clause":1,"n_subclause":0,"n_abl_abs":0,"n_gerund":1,"n_inf":1,"n_part":1,"all":54.53}'
+ text_complexity_json_string: str = '{"all":54.53,"avg_w_len":5.79,"avg_w_per_sent":17.33,"lex_den":0.73,"n_abl_abs":0,"n_clause":1,"n_gerund":1,"n_inf":1,"n_part":1,"n_punct":3,"n_sent":3,"n_subclause":0,"n_types":48,"n_w":52,"pos":11}'
text_list: List[Tuple[str, str]] = [("urn:cts:latinLit:phi0448.phi001.perseus-lat2:1.1.1", raw_text.split(".")[0]),
("urn:cts:latinLit:phi0448.phi001.perseus-lat2:1.1.2", raw_text.split(".")[1])]
text_parts: List[TextPart] = [
diff --git a/mc_backend/openapi/.dockerignore b/mc_backend/openapi/.dockerignore
new file mode 100644
index 0000000..f961960
--- /dev/null
+++ b/mc_backend/openapi/.dockerignore
@@ -0,0 +1,72 @@
+.travis.yml
+.openapi-generator-ignore
+README.md
+tox.ini
+git_push.sh
+test-requirements.txt
+setup.py
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+venv/
+.python-version
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+#Ipython Notebook
+.ipynb_checkpoints
diff --git a/mc_backend/openapi/.gitignore b/mc_backend/openapi/.gitignore
new file mode 100644
index 0000000..43995bd
--- /dev/null
+++ b/mc_backend/openapi/.gitignore
@@ -0,0 +1,66 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+venv/
+.venv/
+.python-version
+.pytest_cache
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+#Ipython Notebook
+.ipynb_checkpoints
diff --git a/mc_backend/openapi/.openapi-generator-ignore b/mc_backend/openapi/.openapi-generator-ignore
new file mode 100644
index 0000000..7484ee5
--- /dev/null
+++ b/mc_backend/openapi/.openapi-generator-ignore
@@ -0,0 +1,23 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns closely follow those of .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
diff --git a/mc_backend/openapi/.openapi-generator/VERSION b/mc_backend/openapi/.openapi-generator/VERSION
new file mode 100644
index 0000000..ecedc98
--- /dev/null
+++ b/mc_backend/openapi/.openapi-generator/VERSION
@@ -0,0 +1 @@
+4.3.1
\ No newline at end of file
diff --git a/mc_backend/openapi/.travis.yml b/mc_backend/openapi/.travis.yml
new file mode 100644
index 0000000..ad71ee5
--- /dev/null
+++ b/mc_backend/openapi/.travis.yml
@@ -0,0 +1,14 @@
+# ref: https://docs.travis-ci.com/user/languages/python
+language: python
+python:
+ - "3.2"
+ - "3.3"
+ - "3.4"
+ - "3.5"
+ - "3.6"
+ - "3.7"
+ - "3.8"
+# command to install dependencies
+install: "pip install -r requirements.txt"
+# command to run tests
+script: nosetests
diff --git a/mc_backend/openapi/Dockerfile b/mc_backend/openapi/Dockerfile
new file mode 100644
index 0000000..4857637
--- /dev/null
+++ b/mc_backend/openapi/Dockerfile
@@ -0,0 +1,16 @@
+FROM python:3-alpine
+
+RUN mkdir -p /usr/src/app
+WORKDIR /usr/src/app
+
+COPY requirements.txt /usr/src/app/
+
+RUN pip3 install --no-cache-dir -r requirements.txt
+
+COPY . /usr/src/app
+
+EXPOSE 8080
+
+ENTRYPOINT ["python3"]
+
+CMD ["-m", "openapi_server"]
\ No newline at end of file
diff --git a/mc_backend/openapi/README.md b/mc_backend/openapi/README.md
new file mode 100644
index 0000000..1b222e5
--- /dev/null
+++ b/mc_backend/openapi/README.md
@@ -0,0 +1,49 @@
+# OpenAPI generated server
+
+## Overview
+This server was generated by the [OpenAPI Generator](https://openapi-generator.tech) project. By using the
+[OpenAPI-Spec](https://openapis.org) from a remote server, you can easily generate a server stub. This
+is an example of building an OpenAPI-enabled Flask server.
+
+This example uses the [Connexion](https://github.com/zalando/connexion) library on top of Flask.
+
+## Requirements
+Python 3.5.2+
+
+## Usage
+To run the server, please execute the following from the root directory:
+
+```
+pip3 install -r requirements.txt
+python3 -m openapi_server
+```
+
+and open your browser here:
+
+```
+http://localhost:8080/mc/api/v1.0/ui/
+```
+
+Your OpenAPI definition lives here:
+
+```
+http://localhost:8080/mc/api/v1.0/openapi.json
+```
+
+To launch the integration tests, use tox:
+```
+sudo pip install tox
+tox
+```
+
+## Running with Docker
+
+To run the server on a Docker container, please execute the following from the root directory:
+
+```bash
+# building the image
+docker build -t openapi_server .
+
+# starting up a container
+docker run -p 8080:8080 openapi_server
+```
\ No newline at end of file
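
Once the server is running as described above, a minimal smoke test from Python might look like the sketch below. It assumes the local base path shown in the README and that the `requests` package is installed (it is not part of the generated requirements).

```python
# Minimal smoke test, assuming the server is running locally as described in
# the README and that the `requests` package is available.
import requests

spec = requests.get("http://localhost:8080/mc/api/v1.0/openapi.json").json()
print(spec["info"]["title"])             # e.g. "Machina Callida Backend REST API"
for path in sorted(spec.get("paths", {})):
    print(path)                           # lists the generated endpoints
```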
diff --git a/mc_backend/openapi/git_push.sh b/mc_backend/openapi/git_push.sh
new file mode 100644
index 0000000..ced3be2
--- /dev/null
+++ b/mc_backend/openapi/git_push.sh
@@ -0,0 +1,58 @@
+#!/bin/sh
+# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/
+#
+# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com"
+
+git_user_id=$1
+git_repo_id=$2
+release_note=$3
+git_host=$4
+
+if [ "$git_host" = "" ]; then
+ git_host="github.com"
+ echo "[INFO] No command line input provided. Set \$git_host to $git_host"
+fi
+
+if [ "$git_user_id" = "" ]; then
+ git_user_id="GIT_USER_ID"
+ echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id"
+fi
+
+if [ "$git_repo_id" = "" ]; then
+ git_repo_id="GIT_REPO_ID"
+ echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id"
+fi
+
+if [ "$release_note" = "" ]; then
+ release_note="Minor update"
+ echo "[INFO] No command line input provided. Set \$release_note to $release_note"
+fi
+
+# Initialize the local directory as a Git repository
+git init
+
+# Adds the files in the local repository and stages them for commit.
+git add .
+
+# Commits the tracked changes and prepares them to be pushed to a remote repository.
+git commit -m "$release_note"
+
+# Sets the new remote
+git_remote=`git remote`
+if [ "$git_remote" = "" ]; then # git remote not defined
+
+ if [ "$GIT_TOKEN" = "" ]; then
+ echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment."
+ git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git
+ else
+ git remote add origin https://${git_user_id}:${GIT_TOKEN}@${git_host}/${git_user_id}/${git_repo_id}.git
+ fi
+
+fi
+
+git pull origin master
+
+# Pushes the changes in the local repository up to the remote repository
+echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git"
+git push origin master 2>&1 | grep -v 'To https'
+
diff --git a/mc_backend/openapi/openapi_server/__init__.py b/mc_backend/openapi/openapi_server/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/mc_backend/openapi/openapi_server/__main__.py b/mc_backend/openapi/openapi_server/__main__.py
new file mode 100644
index 0000000..6f69b80
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/__main__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python3
+
+import connexion
+
+from openapi.openapi_server import encoder
+
+
+def main():
+ app = connexion.App(__name__, specification_dir='./openapi/')
+ app.app.json_encoder = encoder.JSONEncoder
+ app.add_api('openapi.yaml',
+ arguments={'title': 'Machina Callida Backend REST API'},
+ pythonic_params=True)
+ app.run(port=8080)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/mc_backend/openapi/openapi_server/controllers/__init__.py b/mc_backend/openapi/openapi_server/controllers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/mc_backend/openapi/openapi_server/controllers/default_controller.py b/mc_backend/openapi/openapi_server/controllers/default_controller.py
new file mode 100644
index 0000000..3cdc45c
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/controllers/default_controller.py
@@ -0,0 +1,94 @@
+import connexion
+import six
+
+from openapi.openapi_server.models.annis_response import AnnisResponse # noqa: E501
+from openapi.openapi_server.models.corpus import Corpus # noqa: E501
+from openapi.openapi_server.models.exercise_base import ExerciseBase # noqa: E501
+from openapi.openapi_server.models.unknownbasetype import UNKNOWN_BASE_TYPE # noqa: E501
+from openapi.openapi_server import util
+
+
+def mcserver_app_api_corpus_api_delete(cid): # noqa: E501
+ """Deletes a single corpus by ID.
+
+ # noqa: E501
+
+ :param cid: Corpus identifier.
+ :type cid: int
+
+ :rtype: bool
+ """
+ return 'do some magic!'
+
+
+def mcserver_app_api_corpus_api_get(cid): # noqa: E501
+ """Returns a single corpus by ID.
+
+ # noqa: E501
+
+ :param cid: Corpus identifier.
+ :type cid: int
+
+ :rtype: Corpus
+ """
+ return 'do some magic!'
+
+
+def mcserver_app_api_corpus_api_patch(cid, author=None, source_urn=None, title=None): # noqa: E501
+ """Updates a single corpus by ID.
+
+ # noqa: E501
+
+ :param cid: Corpus identifier.
+ :type cid: int
+ :param author: Author of the texts in the corpus.
+ :type author: str
+ :param source_urn: CTS base URN for referencing the corpus.
+ :type source_urn: str
+ :param title: Corpus title.
+ :type title: str
+
+ :rtype: Corpus
+ """
+ return 'do some magic!'
+
+
+def mcserver_app_api_corpus_list_api_get(last_update_time): # noqa: E501
+ """Returns a list of corpora.
+
+ # noqa: E501
+
+ :param last_update_time: Time (in milliseconds) of the last update.
+ :type last_update_time: int
+
+ :rtype: Corpus
+ """
+ return 'do some magic!'
+
+
+def mcserver_app_api_exercise_api_get(eid): # noqa: E501
+ """Returns exercise data by ID.
+
+ # noqa: E501
+
+ :param eid: Unique identifier (UUID) for the exercise.
+ :type eid: str
+
+ :rtype: AnnisResponse
+ """
+ return 'do some magic!'
+
+
+def mcserver_app_api_exercise_api_post(unknown_base_type): # noqa: E501
+ """Creates a new exercise.
+
+ # noqa: E501
+
+ :param unknown_base_type:
+ :type unknown_base_type: dict | bytes
+
+ :rtype: AnnisResponse
+ """
+ if connexion.request.is_json:
+ unknown_base_type = UNKNOWN_BASE_TYPE.from_dict(connexion.request.get_json()) # noqa: E501
+ return 'do some magic!'
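
The generated stubs above all return the placeholder string `'do some magic!'`. As an illustration only, one of them could be backed by the generated models as sketched below; the in-memory `CORPORA` dict is a hypothetical stand-in for the real database lookup in the mcserver package, and the example corpus values are made up.

```python
# Illustrative sketch only: CORPORA is a hypothetical stand-in for the
# project's actual database access and is not part of the generated code.
from openapi.openapi_server.models.corpus import Corpus

CORPORA = {
    1: Corpus(cid=1, source_urn="urn:cts:latinLit:phi0448.phi001.perseus-lat2",
              author="C. Iulius Caesar", title="Commentarii de bello Gallico")
}


def mcserver_app_api_corpus_api_get(cid):
    corpus = CORPORA.get(cid)
    if corpus is None:
        return "Not found", 404      # Connexion turns this tuple into an HTTP response
    return corpus.to_dict(), 200     # serialized to JSON by the Flask app
```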
diff --git a/mc_backend/openapi/openapi_server/controllers/security_controller_.py b/mc_backend/openapi/openapi_server/controllers/security_controller_.py
new file mode 100644
index 0000000..ecac405
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/controllers/security_controller_.py
@@ -0,0 +1,3 @@
+from typing import List
+
+
diff --git a/mc_backend/openapi/openapi_server/encoder.py b/mc_backend/openapi/openapi_server/encoder.py
new file mode 100644
index 0000000..b2a9e8e
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/encoder.py
@@ -0,0 +1,20 @@
+from connexion.apps.flask_app import FlaskJSONEncoder
+import six
+
+from openapi.openapi_server.models.base_model_ import Model
+
+
+class JSONEncoder(FlaskJSONEncoder):
+ include_nulls = False
+
+ def default(self, o):
+ if isinstance(o, Model):
+ dikt = {}
+ for attr, _ in six.iteritems(o.openapi_types):
+ value = getattr(o, attr)
+ if value is None and not self.include_nulls:
+ continue
+ attr = o.attribute_map[attr]
+ dikt[attr] = value
+ return dikt
+ return FlaskJSONEncoder.default(self, o)
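
A short sketch of the encoder's effect, assuming it is registered on the Flask app as in `__main__.py` (`app.app.json_encoder = encoder.JSONEncoder`): model attributes set to None are dropped from the output because `include_nulls` is False. The `SolutionElement` values below are example data.

```python
# Sketch of the encoder's behaviour; the SolutionElement values are example data.
import json

from openapi.openapi_server.encoder import JSONEncoder
from openapi.openapi_server.models.solution_element import SolutionElement

element = SolutionElement(sentence_id=1, token_id=2, content=None,
                          salt_id="doc1#sent1tok2")
print(json.dumps(element, cls=JSONEncoder, sort_keys=True))
# -> {"salt_id": "doc1#sent1tok2", "sentence_id": 1, "token_id": 2}
#    `content` is omitted because include_nulls is False.
```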
diff --git a/mc_backend/openapi/openapi_server/models/__init__.py b/mc_backend/openapi/openapi_server/models/__init__.py
new file mode 100644
index 0000000..64b3bd9
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/__init__.py
@@ -0,0 +1,19 @@
+# coding: utf-8
+
+# flake8: noqa
+from __future__ import absolute_import
+# import models into model package
+from openapi.openapi_server.models.annis_response import AnnisResponse
+from openapi.openapi_server.models.annis_response_frequency_analysis import AnnisResponseFrequencyAnalysis
+from openapi.openapi_server.models.corpus import Corpus
+from openapi.openapi_server.models.exercise import Exercise
+from openapi.openapi_server.models.exercise_all_of import ExerciseAllOf
+from openapi.openapi_server.models.exercise_base import ExerciseBase
+from openapi.openapi_server.models.graph_data import GraphData
+from openapi.openapi_server.models.learning_result import LearningResult
+from openapi.openapi_server.models.link import Link
+from openapi.openapi_server.models.node import Node
+from openapi.openapi_server.models.solution import Solution
+from openapi.openapi_server.models.solution_element import SolutionElement
+from openapi.openapi_server.models.text_complexity import TextComplexity
+from openapi.openapi_server.models.update_info import UpdateInfo
diff --git a/mc_backend/openapi/openapi_server/models/annis_response.py b/mc_backend/openapi/openapi_server/models/annis_response.py
new file mode 100644
index 0000000..e906140
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/annis_response.py
@@ -0,0 +1,238 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server.models.annis_response_frequency_analysis import AnnisResponseFrequencyAnalysis
+from openapi.openapi_server.models.graph_data import GraphData
+from openapi.openapi_server.models.solution import Solution
+from openapi.openapi_server.models.text_complexity import TextComplexity
+from openapi.openapi_server import util
+
+from openapi.openapi_server.models.annis_response_frequency_analysis import AnnisResponseFrequencyAnalysis # noqa: E501
+from openapi.openapi_server.models.graph_data import GraphData # noqa: E501
+from openapi.openapi_server.models.solution import Solution # noqa: E501
+from openapi.openapi_server.models.text_complexity import TextComplexity # noqa: E501
+
+class AnnisResponse(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, exercise_id=None, exercise_type=None, frequency_analysis=None, graph_data=None, solutions=None, text_complexity=None, uri=None): # noqa: E501
+ """AnnisResponse - a model defined in OpenAPI
+
+ :param exercise_id: The exercise_id of this AnnisResponse. # noqa: E501
+ :type exercise_id: str
+ :param exercise_type: The exercise_type of this AnnisResponse. # noqa: E501
+ :type exercise_type: str
+ :param frequency_analysis: The frequency_analysis of this AnnisResponse. # noqa: E501
+ :type frequency_analysis: List[AnnisResponseFrequencyAnalysis]
+ :param graph_data: The graph_data of this AnnisResponse. # noqa: E501
+ :type graph_data: GraphData
+ :param solutions: The solutions of this AnnisResponse. # noqa: E501
+ :type solutions: List[Solution]
+ :param text_complexity: The text_complexity of this AnnisResponse. # noqa: E501
+ :type text_complexity: TextComplexity
+ :param uri: The uri of this AnnisResponse. # noqa: E501
+ :type uri: str
+ """
+ self.openapi_types = {
+ 'exercise_id': str,
+ 'exercise_type': str,
+ 'frequency_analysis': List[AnnisResponseFrequencyAnalysis],
+ 'graph_data': GraphData,
+ 'solutions': List[Solution],
+ 'text_complexity': TextComplexity,
+ 'uri': str
+ }
+
+ self.attribute_map = {
+ 'exercise_id': 'exercise_id',
+ 'exercise_type': 'exercise_type',
+ 'frequency_analysis': 'frequency_analysis',
+ 'graph_data': 'graph_data',
+ 'solutions': 'solutions',
+ 'text_complexity': 'text_complexity',
+ 'uri': 'uri'
+ }
+
+ self._exercise_id = exercise_id
+ self._exercise_type = exercise_type
+ self._frequency_analysis = frequency_analysis
+ self._graph_data = graph_data
+ self._solutions = solutions
+ self._text_complexity = text_complexity
+ self._uri = uri
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'AnnisResponse':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The AnnisResponse of this AnnisResponse. # noqa: E501
+ :rtype: AnnisResponse
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def exercise_id(self):
+ """Gets the exercise_id of this AnnisResponse.
+
+ Unique identifier (UUID) for the exercise. # noqa: E501
+
+ :return: The exercise_id of this AnnisResponse.
+ :rtype: str
+ """
+ return self._exercise_id
+
+ @exercise_id.setter
+ def exercise_id(self, exercise_id):
+ """Sets the exercise_id of this AnnisResponse.
+
+ Unique identifier (UUID) for the exercise. # noqa: E501
+
+ :param exercise_id: The exercise_id of this AnnisResponse.
+ :type exercise_id: str
+ """
+
+ self._exercise_id = exercise_id
+
+ @property
+ def exercise_type(self):
+ """Gets the exercise_type of this AnnisResponse.
+
+ Type of exercise, concerning interaction and layout. # noqa: E501
+
+ :return: The exercise_type of this AnnisResponse.
+ :rtype: str
+ """
+ return self._exercise_type
+
+ @exercise_type.setter
+ def exercise_type(self, exercise_type):
+ """Sets the exercise_type of this AnnisResponse.
+
+ Type of exercise, concerning interaction and layout. # noqa: E501
+
+ :param exercise_type: The exercise_type of this AnnisResponse.
+ :type exercise_type: str
+ """
+
+ self._exercise_type = exercise_type
+
+ @property
+ def frequency_analysis(self):
+ """Gets the frequency_analysis of this AnnisResponse.
+
+ List of items with frequency data for linguistic phenomena. # noqa: E501
+
+ :return: The frequency_analysis of this AnnisResponse.
+ :rtype: List[AnnisResponseFrequencyAnalysis]
+ """
+ return self._frequency_analysis
+
+ @frequency_analysis.setter
+ def frequency_analysis(self, frequency_analysis):
+ """Sets the frequency_analysis of this AnnisResponse.
+
+ List of items with frequency data for linguistic phenomena. # noqa: E501
+
+ :param frequency_analysis: The frequency_analysis of this AnnisResponse.
+ :type frequency_analysis: List[AnnisResponseFrequencyAnalysis]
+ """
+
+ self._frequency_analysis = frequency_analysis
+
+ @property
+ def graph_data(self):
+ """Gets the graph_data of this AnnisResponse.
+
+
+ :return: The graph_data of this AnnisResponse.
+ :rtype: GraphData
+ """
+ return self._graph_data
+
+ @graph_data.setter
+ def graph_data(self, graph_data):
+ """Sets the graph_data of this AnnisResponse.
+
+
+ :param graph_data: The graph_data of this AnnisResponse.
+ :type graph_data: GraphData
+ """
+
+ self._graph_data = graph_data
+
+ @property
+ def solutions(self):
+ """Gets the solutions of this AnnisResponse.
+
+ Correct solutions for this exercise. # noqa: E501
+
+ :return: The solutions of this AnnisResponse.
+ :rtype: List[Solution]
+ """
+ return self._solutions
+
+ @solutions.setter
+ def solutions(self, solutions):
+ """Sets the solutions of this AnnisResponse.
+
+ Correct solutions for this exercise. # noqa: E501
+
+ :param solutions: The solutions of this AnnisResponse.
+ :type solutions: List[Solution]
+ """
+
+ self._solutions = solutions
+
+ @property
+ def text_complexity(self):
+ """Gets the text_complexity of this AnnisResponse.
+
+
+ :return: The text_complexity of this AnnisResponse.
+ :rtype: TextComplexity
+ """
+ return self._text_complexity
+
+ @text_complexity.setter
+ def text_complexity(self, text_complexity):
+ """Sets the text_complexity of this AnnisResponse.
+
+
+ :param text_complexity: The text_complexity of this AnnisResponse.
+ :type text_complexity: TextComplexity
+ """
+
+ self._text_complexity = text_complexity
+
+ @property
+ def uri(self):
+ """Gets the uri of this AnnisResponse.
+
+ URI for accessing the exercise in this API. # noqa: E501
+
+ :return: The uri of this AnnisResponse.
+ :rtype: str
+ """
+ return self._uri
+
+ @uri.setter
+ def uri(self, uri):
+ """Sets the uri of this AnnisResponse.
+
+ URI for accessing the exercise in this API. # noqa: E501
+
+ :param uri: The uri of this AnnisResponse.
+ :type uri: str
+ """
+
+ self._uri = uri
diff --git a/mc_backend/openapi/openapi_server/models/annis_response_frequency_analysis.py b/mc_backend/openapi/openapi_server/models/annis_response_frequency_analysis.py
new file mode 100644
index 0000000..f037f10
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/annis_response_frequency_analysis.py
@@ -0,0 +1,122 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class AnnisResponseFrequencyAnalysis(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, count=None, phenomena=None, values=None): # noqa: E501
+ """AnnisResponseFrequencyAnalysis - a model defined in OpenAPI
+
+ :param count: The count of this AnnisResponseFrequencyAnalysis. # noqa: E501
+ :type count: int
+ :param phenomena: The phenomena of this AnnisResponseFrequencyAnalysis. # noqa: E501
+ :type phenomena: List[str]
+ :param values: The values of this AnnisResponseFrequencyAnalysis. # noqa: E501
+ :type values: List[str]
+ """
+ self.openapi_types = {
+ 'count': int,
+ 'phenomena': List[str],
+ 'values': List[str]
+ }
+
+ self.attribute_map = {
+ 'count': 'count',
+ 'phenomena': 'phenomena',
+ 'values': 'values'
+ }
+
+ self._count = count
+ self._phenomena = phenomena
+ self._values = values
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'AnnisResponseFrequencyAnalysis':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The AnnisResponse_frequency_analysis of this AnnisResponseFrequencyAnalysis. # noqa: E501
+ :rtype: AnnisResponseFrequencyAnalysis
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def count(self):
+ """Gets the count of this AnnisResponseFrequencyAnalysis.
+
+ How often the given combination of values occurred. # noqa: E501
+
+ :return: The count of this AnnisResponseFrequencyAnalysis.
+ :rtype: int
+ """
+ return self._count
+
+ @count.setter
+ def count(self, count):
+ """Sets the count of this AnnisResponseFrequencyAnalysis.
+
+ How often the given combination of values occurred. # noqa: E501
+
+ :param count: The count of this AnnisResponseFrequencyAnalysis.
+ :type count: int
+ """
+
+ self._count = count
+
+ @property
+ def phenomena(self):
+ """Gets the phenomena of this AnnisResponseFrequencyAnalysis.
+
+ Labels for the phenomena described in this frequency entry. # noqa: E501
+
+ :return: The phenomena of this AnnisResponseFrequencyAnalysis.
+ :rtype: List[str]
+ """
+ return self._phenomena
+
+ @phenomena.setter
+ def phenomena(self, phenomena):
+ """Sets the phenomena of this AnnisResponseFrequencyAnalysis.
+
+ Labels for the phenomena described in this frequency entry. # noqa: E501
+
+ :param phenomena: The phenomena of this AnnisResponseFrequencyAnalysis.
+ :type phenomena: List[str]
+ """
+
+ self._phenomena = phenomena
+
+ @property
+ def values(self):
+ """Gets the values of this AnnisResponseFrequencyAnalysis.
+
+ Values for the phenomena described in this frequency entry. # noqa: E501
+
+ :return: The values of this AnnisResponseFrequencyAnalysis.
+ :rtype: List[str]
+ """
+ return self._values
+
+ @values.setter
+ def values(self, values):
+ """Sets the values of this AnnisResponseFrequencyAnalysis.
+
+ Values for the phenomena described in this frequency entry. # noqa: E501
+
+ :param values: The values of this AnnisResponseFrequencyAnalysis.
+ :type values: List[str]
+ """
+
+ self._values = values
diff --git a/mc_backend/openapi/openapi_server/models/base_model_.py b/mc_backend/openapi/openapi_server/models/base_model_.py
new file mode 100644
index 0000000..8085b2d
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/base_model_.py
@@ -0,0 +1,69 @@
+import pprint
+
+import six
+import typing
+
+from openapi.openapi_server import util
+
+T = typing.TypeVar('T')
+
+
+class Model(object):
+ # openapiTypes: The key is attribute name and the
+ # value is attribute type.
+ openapi_types = {}
+
+ # attributeMap: The key is attribute name and the
+ # value is json key in definition.
+ attribute_map = {}
+
+ @classmethod
+ def from_dict(cls: typing.Type[T], dikt) -> T:
+ """Returns the dict as a model"""
+ return util.deserialize_model(dikt, cls)
+
+ def to_dict(self):
+ """Returns the model properties as a dict
+
+ :rtype: dict
+ """
+ result = {}
+
+ for attr, _ in six.iteritems(self.openapi_types):
+ value = getattr(self, attr)
+ if isinstance(value, list):
+ result[attr] = list(map(
+ lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
+ value
+ ))
+ elif hasattr(value, "to_dict"):
+ result[attr] = value.to_dict()
+ elif isinstance(value, dict):
+ result[attr] = dict(map(
+ lambda item: (item[0], item[1].to_dict())
+ if hasattr(item[1], "to_dict") else item,
+ value.items()
+ ))
+ else:
+ result[attr] = value
+
+ return result
+
+ def to_str(self):
+ """Returns the string representation of the model
+
+ :rtype: str
+ """
+ return pprint.pformat(self.to_dict())
+
+ def __repr__(self):
+ """For `print` and `pprint`"""
+ return self.to_str()
+
+ def __eq__(self, other):
+ """Returns true if both objects are equal"""
+ return self.__dict__ == other.__dict__
+
+ def __ne__(self, other):
+ """Returns true if both objects are not equal"""
+ return not self == other
diff --git a/mc_backend/openapi/openapi_server/models/corpus.py b/mc_backend/openapi/openapi_server/models/corpus.py
new file mode 100644
index 0000000..8c1d794
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/corpus.py
@@ -0,0 +1,236 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class Corpus(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, author='Anonymus', cid=None, citation_level_1='default', citation_level_2='default', citation_level_3='default', source_urn=None, title='Anonymus'): # noqa: E501
+ """Corpus - a model defined in OpenAPI
+
+ :param author: The author of this Corpus. # noqa: E501
+ :type author: str
+ :param cid: The cid of this Corpus. # noqa: E501
+ :type cid: int
+ :param citation_level_1: The citation_level_1 of this Corpus. # noqa: E501
+ :type citation_level_1: str
+ :param citation_level_2: The citation_level_2 of this Corpus. # noqa: E501
+ :type citation_level_2: str
+ :param citation_level_3: The citation_level_3 of this Corpus. # noqa: E501
+ :type citation_level_3: str
+ :param source_urn: The source_urn of this Corpus. # noqa: E501
+ :type source_urn: str
+ :param title: The title of this Corpus. # noqa: E501
+ :type title: str
+ """
+ self.openapi_types = {
+ 'author': str,
+ 'cid': int,
+ 'citation_level_1': str,
+ 'citation_level_2': str,
+ 'citation_level_3': str,
+ 'source_urn': str,
+ 'title': str
+ }
+
+ self.attribute_map = {
+ 'author': 'author',
+ 'cid': 'cid',
+ 'citation_level_1': 'citation_level_1',
+ 'citation_level_2': 'citation_level_2',
+ 'citation_level_3': 'citation_level_3',
+ 'source_urn': 'source_urn',
+ 'title': 'title'
+ }
+
+ self._author = author
+ self._cid = cid
+ self._citation_level_1 = citation_level_1
+ self._citation_level_2 = citation_level_2
+ self._citation_level_3 = citation_level_3
+ self._source_urn = source_urn
+ self._title = title
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'Corpus':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The Corpus of this Corpus. # noqa: E501
+ :rtype: Corpus
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def author(self):
+ """Gets the author of this Corpus.
+
+ Author of the texts in the corpus. # noqa: E501
+
+ :return: The author of this Corpus.
+ :rtype: str
+ """
+ return self._author
+
+ @author.setter
+ def author(self, author):
+ """Sets the author of this Corpus.
+
+ Author of the texts in the corpus. # noqa: E501
+
+ :param author: The author of this Corpus.
+ :type author: str
+ """
+
+ self._author = author
+
+ @property
+ def cid(self):
+ """Gets the cid of this Corpus.
+
+ Unique identifier for the corpus. # noqa: E501
+
+ :return: The cid of this Corpus.
+ :rtype: int
+ """
+ return self._cid
+
+ @cid.setter
+ def cid(self, cid):
+ """Sets the cid of this Corpus.
+
+ Unique identifier for the corpus. # noqa: E501
+
+ :param cid: The cid of this Corpus.
+ :type cid: int
+ """
+
+ self._cid = cid
+
+ @property
+ def citation_level_1(self):
+ """Gets the citation_level_1 of this Corpus.
+
+ First level for citing the corpus. # noqa: E501
+
+ :return: The citation_level_1 of this Corpus.
+ :rtype: str
+ """
+ return self._citation_level_1
+
+ @citation_level_1.setter
+ def citation_level_1(self, citation_level_1):
+ """Sets the citation_level_1 of this Corpus.
+
+ First level for citing the corpus. # noqa: E501
+
+ :param citation_level_1: The citation_level_1 of this Corpus.
+ :type citation_level_1: str
+ """
+
+ self._citation_level_1 = citation_level_1
+
+ @property
+ def citation_level_2(self):
+ """Gets the citation_level_2 of this Corpus.
+
+ Second level for citing the corpus. # noqa: E501
+
+ :return: The citation_level_2 of this Corpus.
+ :rtype: str
+ """
+ return self._citation_level_2
+
+ @citation_level_2.setter
+ def citation_level_2(self, citation_level_2):
+ """Sets the citation_level_2 of this Corpus.
+
+ Second level for citing the corpus. # noqa: E501
+
+ :param citation_level_2: The citation_level_2 of this Corpus.
+ :type citation_level_2: str
+ """
+
+ self._citation_level_2 = citation_level_2
+
+ @property
+ def citation_level_3(self):
+ """Gets the citation_level_3 of this Corpus.
+
+ Third level for citing the corpus. # noqa: E501
+
+ :return: The citation_level_3 of this Corpus.
+ :rtype: str
+ """
+ return self._citation_level_3
+
+ @citation_level_3.setter
+ def citation_level_3(self, citation_level_3):
+ """Sets the citation_level_3 of this Corpus.
+
+ Third level for citing the corpus. # noqa: E501
+
+ :param citation_level_3: The citation_level_3 of this Corpus.
+ :type citation_level_3: str
+ """
+
+ self._citation_level_3 = citation_level_3
+
+ @property
+ def source_urn(self):
+ """Gets the source_urn of this Corpus.
+
+ CTS base URN for referencing the corpus. # noqa: E501
+
+ :return: The source_urn of this Corpus.
+ :rtype: str
+ """
+ return self._source_urn
+
+ @source_urn.setter
+ def source_urn(self, source_urn):
+ """Sets the source_urn of this Corpus.
+
+ CTS base URN for referencing the corpus. # noqa: E501
+
+ :param source_urn: The source_urn of this Corpus.
+ :type source_urn: str
+ """
+ if source_urn is None:
+ raise ValueError("Invalid value for `source_urn`, must not be `None`") # noqa: E501
+
+ self._source_urn = source_urn
+
+ @property
+ def title(self):
+ """Gets the title of this Corpus.
+
+ Corpus title. # noqa: E501
+
+ :return: The title of this Corpus.
+ :rtype: str
+ """
+ return self._title
+
+ @title.setter
+ def title(self, title):
+ """Sets the title of this Corpus.
+
+ Corpus title. # noqa: E501
+
+ :param title: The title of this Corpus.
+ :type title: str
+ """
+
+ self._title = title
diff --git a/mc_backend/openapi/openapi_server/models/exercise.py b/mc_backend/openapi/openapi_server/models/exercise.py
new file mode 100644
index 0000000..fdeb700
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/exercise.py
@@ -0,0 +1,522 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server.models.exercise_all_of import ExerciseAllOf
+from openapi.openapi_server.models.exercise_base import ExerciseBase
+from openapi.openapi_server import util
+
+from openapi.openapi_server.models.exercise_all_of import ExerciseAllOf # noqa: E501
+from openapi.openapi_server.models.exercise_base import ExerciseBase # noqa: E501
+
+class Exercise(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, correct_feedback='', general_feedback='', incorrect_feedback='', instructions='', partially_correct_feedback='', search_values='[]', work_author='', work_title='', conll='', eid=None, exercise_type='', exercise_type_translation='', language='de', last_access_time=None, solutions='[]', text_complexity=0, urn=''): # noqa: E501
+ """Exercise - a model defined in OpenAPI
+
+ :param correct_feedback: The correct_feedback of this Exercise. # noqa: E501
+ :type correct_feedback: str
+ :param general_feedback: The general_feedback of this Exercise. # noqa: E501
+ :type general_feedback: str
+ :param incorrect_feedback: The incorrect_feedback of this Exercise. # noqa: E501
+ :type incorrect_feedback: str
+ :param instructions: The instructions of this Exercise. # noqa: E501
+ :type instructions: str
+ :param partially_correct_feedback: The partially_correct_feedback of this Exercise. # noqa: E501
+ :type partially_correct_feedback: str
+ :param search_values: The search_values of this Exercise. # noqa: E501
+ :type search_values: str
+ :param work_author: The work_author of this Exercise. # noqa: E501
+ :type work_author: str
+ :param work_title: The work_title of this Exercise. # noqa: E501
+ :type work_title: str
+ :param conll: The conll of this Exercise. # noqa: E501
+ :type conll: str
+ :param eid: The eid of this Exercise. # noqa: E501
+ :type eid: str
+ :param exercise_type: The exercise_type of this Exercise. # noqa: E501
+ :type exercise_type: str
+ :param exercise_type_translation: The exercise_type_translation of this Exercise. # noqa: E501
+ :type exercise_type_translation: str
+ :param language: The language of this Exercise. # noqa: E501
+ :type language: str
+ :param last_access_time: The last_access_time of this Exercise. # noqa: E501
+ :type last_access_time: float
+ :param solutions: The solutions of this Exercise. # noqa: E501
+ :type solutions: str
+ :param text_complexity: The text_complexity of this Exercise. # noqa: E501
+ :type text_complexity: float
+ :param urn: The urn of this Exercise. # noqa: E501
+ :type urn: str
+ """
+ self.openapi_types = {
+ 'correct_feedback': str,
+ 'general_feedback': str,
+ 'incorrect_feedback': str,
+ 'instructions': str,
+ 'partially_correct_feedback': str,
+ 'search_values': str,
+ 'work_author': str,
+ 'work_title': str,
+ 'conll': str,
+ 'eid': str,
+ 'exercise_type': str,
+ 'exercise_type_translation': str,
+ 'language': str,
+ 'last_access_time': float,
+ 'solutions': str,
+ 'text_complexity': float,
+ 'urn': str
+ }
+
+ self.attribute_map = {
+ 'correct_feedback': 'correct_feedback',
+ 'general_feedback': 'general_feedback',
+ 'incorrect_feedback': 'incorrect_feedback',
+ 'instructions': 'instructions',
+ 'partially_correct_feedback': 'partially_correct_feedback',
+ 'search_values': 'search_values',
+ 'work_author': 'work_author',
+ 'work_title': 'work_title',
+ 'conll': 'conll',
+ 'eid': 'eid',
+ 'exercise_type': 'exercise_type',
+ 'exercise_type_translation': 'exercise_type_translation',
+ 'language': 'language',
+ 'last_access_time': 'last_access_time',
+ 'solutions': 'solutions',
+ 'text_complexity': 'text_complexity',
+ 'urn': 'urn'
+ }
+
+ self._correct_feedback = correct_feedback
+ self._general_feedback = general_feedback
+ self._incorrect_feedback = incorrect_feedback
+ self._instructions = instructions
+ self._partially_correct_feedback = partially_correct_feedback
+ self._search_values = search_values
+ self._work_author = work_author
+ self._work_title = work_title
+ self._conll = conll
+ self._eid = eid
+ self._exercise_type = exercise_type
+ self._exercise_type_translation = exercise_type_translation
+ self._language = language
+ self._last_access_time = last_access_time
+ self._solutions = solutions
+ self._text_complexity = text_complexity
+ self._urn = urn
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'Exercise':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The Exercise of this Exercise. # noqa: E501
+ :rtype: Exercise
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def correct_feedback(self):
+ """Gets the correct_feedback of this Exercise.
+
+ Feedback for successful completion of the exercise. # noqa: E501
+
+ :return: The correct_feedback of this Exercise.
+ :rtype: str
+ """
+ return self._correct_feedback
+
+ @correct_feedback.setter
+ def correct_feedback(self, correct_feedback):
+ """Sets the correct_feedback of this Exercise.
+
+ Feedback for successful completion of the exercise. # noqa: E501
+
+ :param correct_feedback: The correct_feedback of this Exercise.
+ :type correct_feedback: str
+ """
+
+ self._correct_feedback = correct_feedback
+
+ @property
+ def general_feedback(self):
+ """Gets the general_feedback of this Exercise.
+
+ Feedback for finishing the exercise. # noqa: E501
+
+ :return: The general_feedback of this Exercise.
+ :rtype: str
+ """
+ return self._general_feedback
+
+ @general_feedback.setter
+ def general_feedback(self, general_feedback):
+ """Sets the general_feedback of this Exercise.
+
+ Feedback for finishing the exercise. # noqa: E501
+
+ :param general_feedback: The general_feedback of this Exercise.
+ :type general_feedback: str
+ """
+
+ self._general_feedback = general_feedback
+
+ @property
+ def incorrect_feedback(self):
+ """Gets the incorrect_feedback of this Exercise.
+
+ Feedback for failing to complete the exercise successfully. # noqa: E501
+
+ :return: The incorrect_feedback of this Exercise.
+ :rtype: str
+ """
+ return self._incorrect_feedback
+
+ @incorrect_feedback.setter
+ def incorrect_feedback(self, incorrect_feedback):
+ """Sets the incorrect_feedback of this Exercise.
+
+ Feedback for failing to complete the exercise successfully. # noqa: E501
+
+ :param incorrect_feedback: The incorrect_feedback of this Exercise.
+ :type incorrect_feedback: str
+ """
+
+ self._incorrect_feedback = incorrect_feedback
+
+ @property
+ def instructions(self):
+ """Gets the instructions of this Exercise.
+
+ Hints for how to complete the exercise. # noqa: E501
+
+ :return: The instructions of this Exercise.
+ :rtype: str
+ """
+ return self._instructions
+
+ @instructions.setter
+ def instructions(self, instructions):
+ """Sets the instructions of this Exercise.
+
+ Hints for how to complete the exercise. # noqa: E501
+
+ :param instructions: The instructions of this Exercise.
+ :type instructions: str
+ """
+
+ self._instructions = instructions
+
+ @property
+ def partially_correct_feedback(self):
+ """Gets the partially_correct_feedback of this Exercise.
+
+ Feedback for successfully completing certain parts of the exercise. # noqa: E501
+
+ :return: The partially_correct_feedback of this Exercise.
+ :rtype: str
+ """
+ return self._partially_correct_feedback
+
+ @partially_correct_feedback.setter
+ def partially_correct_feedback(self, partially_correct_feedback):
+ """Sets the partially_correct_feedback of this Exercise.
+
+ Feedback for successfully completing certain parts of the exercise. # noqa: E501
+
+ :param partially_correct_feedback: The partially_correct_feedback of this Exercise.
+ :type partially_correct_feedback: str
+ """
+
+ self._partially_correct_feedback = partially_correct_feedback
+
+ @property
+ def search_values(self):
+ """Gets the search_values of this Exercise.
+
+ Search queries that were used to build the exercise. # noqa: E501
+
+ :return: The search_values of this Exercise.
+ :rtype: str
+ """
+ return self._search_values
+
+ @search_values.setter
+ def search_values(self, search_values):
+ """Sets the search_values of this Exercise.
+
+ Search queries that were used to build the exercise. # noqa: E501
+
+ :param search_values: The search_values of this Exercise.
+ :type search_values: str
+ """
+
+ self._search_values = search_values
+
+ @property
+ def work_author(self):
+ """Gets the work_author of this Exercise.
+
+ Name of the person who wrote the base text for the exercise. # noqa: E501
+
+ :return: The work_author of this Exercise.
+ :rtype: str
+ """
+ return self._work_author
+
+ @work_author.setter
+ def work_author(self, work_author):
+ """Sets the work_author of this Exercise.
+
+ Name of the person who wrote the base text for the exercise. # noqa: E501
+
+ :param work_author: The work_author of this Exercise.
+ :type work_author: str
+ """
+
+ self._work_author = work_author
+
+ @property
+ def work_title(self):
+ """Gets the work_title of this Exercise.
+
+ Title of the base text for the exercise. # noqa: E501
+
+ :return: The work_title of this Exercise.
+ :rtype: str
+ """
+ return self._work_title
+
+ @work_title.setter
+ def work_title(self, work_title):
+ """Sets the work_title of this Exercise.
+
+ Title of the base text for the exercise. # noqa: E501
+
+ :param work_title: The work_title of this Exercise.
+ :type work_title: str
+ """
+
+ self._work_title = work_title
+
+ @property
+ def conll(self):
+ """Gets the conll of this Exercise.
+
+ CONLL-formatted linguistic annotations represented as a single string. # noqa: E501
+
+ :return: The conll of this Exercise.
+ :rtype: str
+ """
+ return self._conll
+
+ @conll.setter
+ def conll(self, conll):
+ """Sets the conll of this Exercise.
+
+ CONLL-formatted linguistic annotations represented as a single string. # noqa: E501
+
+ :param conll: The conll of this Exercise.
+ :type conll: str
+ """
+
+ self._conll = conll
+
+ @property
+ def eid(self):
+ """Gets the eid of this Exercise.
+
+ Unique identifier (UUID) for the exercise. # noqa: E501
+
+ :return: The eid of this Exercise.
+ :rtype: str
+ """
+ return self._eid
+
+ @eid.setter
+ def eid(self, eid):
+ """Sets the eid of this Exercise.
+
+ Unique identifier (UUID) for the exercise. # noqa: E501
+
+ :param eid: The eid of this Exercise.
+ :type eid: str
+ """
+ if eid is None:
+ raise ValueError("Invalid value for `eid`, must not be `None`") # noqa: E501
+
+ self._eid = eid
+
+ @property
+ def exercise_type(self):
+ """Gets the exercise_type of this Exercise.
+
+ Type of exercise, concerning interaction and layout. # noqa: E501
+
+ :return: The exercise_type of this Exercise.
+ :rtype: str
+ """
+ return self._exercise_type
+
+ @exercise_type.setter
+ def exercise_type(self, exercise_type):
+ """Sets the exercise_type of this Exercise.
+
+ Type of exercise, concerning interaction and layout. # noqa: E501
+
+ :param exercise_type: The exercise_type of this Exercise.
+ :type exercise_type: str
+ """
+
+ self._exercise_type = exercise_type
+
+ @property
+ def exercise_type_translation(self):
+ """Gets the exercise_type_translation of this Exercise.
+
+ Localized expression of the exercise type. # noqa: E501
+
+ :return: The exercise_type_translation of this Exercise.
+ :rtype: str
+ """
+ return self._exercise_type_translation
+
+ @exercise_type_translation.setter
+ def exercise_type_translation(self, exercise_type_translation):
+ """Sets the exercise_type_translation of this Exercise.
+
+ Localized expression of the exercise type. # noqa: E501
+
+ :param exercise_type_translation: The exercise_type_translation of this Exercise.
+ :type exercise_type_translation: str
+ """
+
+ self._exercise_type_translation = exercise_type_translation
+
+ @property
+ def language(self):
+ """Gets the language of this Exercise.
+
+ ISO 639-1 Language Code for the localization of exercise content. # noqa: E501
+
+ :return: The language of this Exercise.
+ :rtype: str
+ """
+ return self._language
+
+ @language.setter
+ def language(self, language):
+ """Sets the language of this Exercise.
+
+ ISO 639-1 Language Code for the localization of exercise content. # noqa: E501
+
+ :param language: The language of this Exercise.
+ :type language: str
+ """
+
+ self._language = language
+
+ @property
+ def last_access_time(self):
+ """Gets the last_access_time of this Exercise.
+
+ When the exercise was last accessed (as POSIX timestamp). # noqa: E501
+
+ :return: The last_access_time of this Exercise.
+ :rtype: float
+ """
+ return self._last_access_time
+
+ @last_access_time.setter
+ def last_access_time(self, last_access_time):
+ """Sets the last_access_time of this Exercise.
+
+ When the exercise was last accessed (as POSIX timestamp). # noqa: E501
+
+ :param last_access_time: The last_access_time of this Exercise.
+ :type last_access_time: float
+ """
+ if last_access_time is None:
+ raise ValueError("Invalid value for `last_access_time`, must not be `None`") # noqa: E501
+
+ self._last_access_time = last_access_time
+
+ @property
+ def solutions(self):
+ """Gets the solutions of this Exercise.
+
+ Correct solutions for the exercise. # noqa: E501
+
+ :return: The solutions of this Exercise.
+ :rtype: str
+ """
+ return self._solutions
+
+ @solutions.setter
+ def solutions(self, solutions):
+ """Sets the solutions of this Exercise.
+
+ Correct solutions for the exercise. # noqa: E501
+
+ :param solutions: The solutions of this Exercise.
+ :type solutions: str
+ """
+
+ self._solutions = solutions
+
+ @property
+ def text_complexity(self):
+ """Gets the text_complexity of this Exercise.
+
+ Overall text complexity as measured by the software's internal language analysis. # noqa: E501
+
+ :return: The text_complexity of this Exercise.
+ :rtype: float
+ """
+ return self._text_complexity
+
+ @text_complexity.setter
+ def text_complexity(self, text_complexity):
+ """Sets the text_complexity of this Exercise.
+
+ Overall text complexity as measured by the software's internal language analysis. # noqa: E501
+
+ :param text_complexity: The text_complexity of this Exercise.
+ :type text_complexity: float
+ """
+
+ self._text_complexity = text_complexity
+
+ @property
+ def urn(self):
+ """Gets the urn of this Exercise.
+
+ CTS URN for the text passage from which the exercise was created. # noqa: E501
+
+ :return: The urn of this Exercise.
+ :rtype: str
+ """
+ return self._urn
+
+ @urn.setter
+ def urn(self, urn):
+ """Sets the urn of this Exercise.
+
+ CTS URN for the text passage from which the exercise was created. # noqa: E501
+
+ :param urn: The urn of this Exercise.
+ :type urn: str
+ """
+
+ self._urn = urn
diff --git a/mc_backend/openapi/openapi_server/models/exercise_all_of.py b/mc_backend/openapi/openapi_server/models/exercise_all_of.py
new file mode 100644
index 0000000..c3801c6
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/exercise_all_of.py
@@ -0,0 +1,294 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class ExerciseAllOf(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, conll='', eid=None, exercise_type='', exercise_type_translation='', language='de', last_access_time=None, solutions='[]', text_complexity=0, urn=''): # noqa: E501
+ """ExerciseAllOf - a model defined in OpenAPI
+
+ :param conll: The conll of this ExerciseAllOf. # noqa: E501
+ :type conll: str
+ :param eid: The eid of this ExerciseAllOf. # noqa: E501
+ :type eid: str
+ :param exercise_type: The exercise_type of this ExerciseAllOf. # noqa: E501
+ :type exercise_type: str
+ :param exercise_type_translation: The exercise_type_translation of this ExerciseAllOf. # noqa: E501
+ :type exercise_type_translation: str
+ :param language: The language of this ExerciseAllOf. # noqa: E501
+ :type language: str
+ :param last_access_time: The last_access_time of this ExerciseAllOf. # noqa: E501
+ :type last_access_time: float
+ :param solutions: The solutions of this ExerciseAllOf. # noqa: E501
+ :type solutions: str
+ :param text_complexity: The text_complexity of this ExerciseAllOf. # noqa: E501
+ :type text_complexity: float
+ :param urn: The urn of this ExerciseAllOf. # noqa: E501
+ :type urn: str
+ """
+ self.openapi_types = {
+ 'conll': str,
+ 'eid': str,
+ 'exercise_type': str,
+ 'exercise_type_translation': str,
+ 'language': str,
+ 'last_access_time': float,
+ 'solutions': str,
+ 'text_complexity': float,
+ 'urn': str
+ }
+
+ self.attribute_map = {
+ 'conll': 'conll',
+ 'eid': 'eid',
+ 'exercise_type': 'exercise_type',
+ 'exercise_type_translation': 'exercise_type_translation',
+ 'language': 'language',
+ 'last_access_time': 'last_access_time',
+ 'solutions': 'solutions',
+ 'text_complexity': 'text_complexity',
+ 'urn': 'urn'
+ }
+
+ self._conll = conll
+ self._eid = eid
+ self._exercise_type = exercise_type
+ self._exercise_type_translation = exercise_type_translation
+ self._language = language
+ self._last_access_time = last_access_time
+ self._solutions = solutions
+ self._text_complexity = text_complexity
+ self._urn = urn
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'ExerciseAllOf':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The Exercise_allOf of this ExerciseAllOf. # noqa: E501
+ :rtype: ExerciseAllOf
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def conll(self):
+ """Gets the conll of this ExerciseAllOf.
+
+ CONLL-formatted linguistic annotations represented as a single string. # noqa: E501
+
+ :return: The conll of this ExerciseAllOf.
+ :rtype: str
+ """
+ return self._conll
+
+ @conll.setter
+ def conll(self, conll):
+ """Sets the conll of this ExerciseAllOf.
+
+ CONLL-formatted linguistic annotations represented as a single string. # noqa: E501
+
+ :param conll: The conll of this ExerciseAllOf.
+ :type conll: str
+ """
+
+ self._conll = conll
+
+ @property
+ def eid(self):
+ """Gets the eid of this ExerciseAllOf.
+
+ Unique identifier (UUID) for the exercise. # noqa: E501
+
+ :return: The eid of this ExerciseAllOf.
+ :rtype: str
+ """
+ return self._eid
+
+ @eid.setter
+ def eid(self, eid):
+ """Sets the eid of this ExerciseAllOf.
+
+ Unique identifier (UUID) for the exercise. # noqa: E501
+
+ :param eid: The eid of this ExerciseAllOf.
+ :type eid: str
+ """
+ if eid is None:
+ raise ValueError("Invalid value for `eid`, must not be `None`") # noqa: E501
+
+ self._eid = eid
+
+ @property
+ def exercise_type(self):
+ """Gets the exercise_type of this ExerciseAllOf.
+
+ Type of exercise, concerning interaction and layout. # noqa: E501
+
+ :return: The exercise_type of this ExerciseAllOf.
+ :rtype: str
+ """
+ return self._exercise_type
+
+ @exercise_type.setter
+ def exercise_type(self, exercise_type):
+ """Sets the exercise_type of this ExerciseAllOf.
+
+ Type of exercise, concerning interaction and layout. # noqa: E501
+
+ :param exercise_type: The exercise_type of this ExerciseAllOf.
+ :type exercise_type: str
+ """
+
+ self._exercise_type = exercise_type
+
+ @property
+ def exercise_type_translation(self):
+ """Gets the exercise_type_translation of this ExerciseAllOf.
+
+ Localized expression of the exercise type. # noqa: E501
+
+ :return: The exercise_type_translation of this ExerciseAllOf.
+ :rtype: str
+ """
+ return self._exercise_type_translation
+
+ @exercise_type_translation.setter
+ def exercise_type_translation(self, exercise_type_translation):
+ """Sets the exercise_type_translation of this ExerciseAllOf.
+
+ Localized expression of the exercise type. # noqa: E501
+
+ :param exercise_type_translation: The exercise_type_translation of this ExerciseAllOf.
+ :type exercise_type_translation: str
+ """
+
+ self._exercise_type_translation = exercise_type_translation
+
+ @property
+ def language(self):
+ """Gets the language of this ExerciseAllOf.
+
+ ISO 639-1 Language Code for the localization of exercise content. # noqa: E501
+
+ :return: The language of this ExerciseAllOf.
+ :rtype: str
+ """
+ return self._language
+
+ @language.setter
+ def language(self, language):
+ """Sets the language of this ExerciseAllOf.
+
+ ISO 639-1 Language Code for the localization of exercise content. # noqa: E501
+
+ :param language: The language of this ExerciseAllOf.
+ :type language: str
+ """
+
+ self._language = language
+
+ @property
+ def last_access_time(self):
+ """Gets the last_access_time of this ExerciseAllOf.
+
+ When the exercise was last accessed (as POSIX timestamp). # noqa: E501
+
+ :return: The last_access_time of this ExerciseAllOf.
+ :rtype: float
+ """
+ return self._last_access_time
+
+ @last_access_time.setter
+ def last_access_time(self, last_access_time):
+ """Sets the last_access_time of this ExerciseAllOf.
+
+ When the exercise was last accessed (as POSIX timestamp). # noqa: E501
+
+ :param last_access_time: The last_access_time of this ExerciseAllOf.
+ :type last_access_time: float
+ """
+ if last_access_time is None:
+ raise ValueError("Invalid value for `last_access_time`, must not be `None`") # noqa: E501
+
+ self._last_access_time = last_access_time
+
+ @property
+ def solutions(self):
+ """Gets the solutions of this ExerciseAllOf.
+
+ Correct solutions for the exercise. # noqa: E501
+
+ :return: The solutions of this ExerciseAllOf.
+ :rtype: str
+ """
+ return self._solutions
+
+ @solutions.setter
+ def solutions(self, solutions):
+ """Sets the solutions of this ExerciseAllOf.
+
+ Correct solutions for the exercise. # noqa: E501
+
+ :param solutions: The solutions of this ExerciseAllOf.
+ :type solutions: str
+ """
+
+ self._solutions = solutions
+
+ @property
+ def text_complexity(self):
+ """Gets the text_complexity of this ExerciseAllOf.
+
+ Overall text complexity as measured by the software's internal language analysis. # noqa: E501
+
+ :return: The text_complexity of this ExerciseAllOf.
+ :rtype: float
+ """
+ return self._text_complexity
+
+ @text_complexity.setter
+ def text_complexity(self, text_complexity):
+ """Sets the text_complexity of this ExerciseAllOf.
+
+ Overall text complexity as measured by the software's internal language analysis. # noqa: E501
+
+ :param text_complexity: The text_complexity of this ExerciseAllOf.
+ :type text_complexity: float
+ """
+
+ self._text_complexity = text_complexity
+
+ @property
+ def urn(self):
+ """Gets the urn of this ExerciseAllOf.
+
+ CTS URN for the text passage from which the exercise was created. # noqa: E501
+
+ :return: The urn of this ExerciseAllOf.
+ :rtype: str
+ """
+ return self._urn
+
+ @urn.setter
+ def urn(self, urn):
+ """Sets the urn of this ExerciseAllOf.
+
+ CTS URN for the text passage from which the exercise was created. # noqa: E501
+
+ :param urn: The urn of this ExerciseAllOf.
+ :type urn: str
+ """
+
+ self._urn = urn
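Each generated model follows the same pattern: openapi_types and attribute_map drive (de)serialization, from_dict() delegates to util.deserialize_model, and required properties are validated in their setters rather than in __init__. A minimal usage sketch for the model above (the identifier and URN values are made up for illustration):

    from openapi.openapi_server.models.exercise_all_of import ExerciseAllOf

    # Deserialize a plain dict (e.g. a parsed JSON request body) into the model.
    payload = {
        "eid": "8f4e2c1a-0000-4000-8000-000000000000",
        "last_access_time": 1590000000.0,
        "urn": "urn:cts:latinLit:phi0448.phi001.perseus-lat2:1.1.1",
    }
    exercise_data = ExerciseAllOf.from_dict(payload)
    print(exercise_data.eid)

    # Required properties (eid, last_access_time) are enforced by their setters:
    try:
        exercise_data.eid = None
    except ValueError as err:
        print(err)  # Invalid value for `eid`, must not be `None`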
diff --git a/mc_backend/openapi/openapi_server/models/exercise_base.py b/mc_backend/openapi/openapi_server/models/exercise_base.py
new file mode 100644
index 0000000..ece91ee
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/exercise_base.py
@@ -0,0 +1,262 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class ExerciseBase(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, correct_feedback='', general_feedback='', incorrect_feedback='', instructions='', partially_correct_feedback='', search_values='[]', work_author='', work_title=''): # noqa: E501
+ """ExerciseBase - a model defined in OpenAPI
+
+ :param correct_feedback: The correct_feedback of this ExerciseBase. # noqa: E501
+ :type correct_feedback: str
+ :param general_feedback: The general_feedback of this ExerciseBase. # noqa: E501
+ :type general_feedback: str
+ :param incorrect_feedback: The incorrect_feedback of this ExerciseBase. # noqa: E501
+ :type incorrect_feedback: str
+ :param instructions: The instructions of this ExerciseBase. # noqa: E501
+ :type instructions: str
+ :param partially_correct_feedback: The partially_correct_feedback of this ExerciseBase. # noqa: E501
+ :type partially_correct_feedback: str
+ :param search_values: The search_values of this ExerciseBase. # noqa: E501
+ :type search_values: str
+ :param work_author: The work_author of this ExerciseBase. # noqa: E501
+ :type work_author: str
+ :param work_title: The work_title of this ExerciseBase. # noqa: E501
+ :type work_title: str
+ """
+ self.openapi_types = {
+ 'correct_feedback': str,
+ 'general_feedback': str,
+ 'incorrect_feedback': str,
+ 'instructions': str,
+ 'partially_correct_feedback': str,
+ 'search_values': str,
+ 'work_author': str,
+ 'work_title': str
+ }
+
+ self.attribute_map = {
+ 'correct_feedback': 'correct_feedback',
+ 'general_feedback': 'general_feedback',
+ 'incorrect_feedback': 'incorrect_feedback',
+ 'instructions': 'instructions',
+ 'partially_correct_feedback': 'partially_correct_feedback',
+ 'search_values': 'search_values',
+ 'work_author': 'work_author',
+ 'work_title': 'work_title'
+ }
+
+ self._correct_feedback = correct_feedback
+ self._general_feedback = general_feedback
+ self._incorrect_feedback = incorrect_feedback
+ self._instructions = instructions
+ self._partially_correct_feedback = partially_correct_feedback
+ self._search_values = search_values
+ self._work_author = work_author
+ self._work_title = work_title
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'ExerciseBase':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The ExerciseBase of this ExerciseBase. # noqa: E501
+ :rtype: ExerciseBase
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def correct_feedback(self):
+ """Gets the correct_feedback of this ExerciseBase.
+
+ Feedback for successful completion of the exercise. # noqa: E501
+
+ :return: The correct_feedback of this ExerciseBase.
+ :rtype: str
+ """
+ return self._correct_feedback
+
+ @correct_feedback.setter
+ def correct_feedback(self, correct_feedback):
+ """Sets the correct_feedback of this ExerciseBase.
+
+ Feedback for successful completion of the exercise. # noqa: E501
+
+ :param correct_feedback: The correct_feedback of this ExerciseBase.
+ :type correct_feedback: str
+ """
+
+ self._correct_feedback = correct_feedback
+
+ @property
+ def general_feedback(self):
+ """Gets the general_feedback of this ExerciseBase.
+
+ Feedback for finishing the exercise. # noqa: E501
+
+ :return: The general_feedback of this ExerciseBase.
+ :rtype: str
+ """
+ return self._general_feedback
+
+ @general_feedback.setter
+ def general_feedback(self, general_feedback):
+ """Sets the general_feedback of this ExerciseBase.
+
+ Feedback for finishing the exercise. # noqa: E501
+
+ :param general_feedback: The general_feedback of this ExerciseBase.
+ :type general_feedback: str
+ """
+
+ self._general_feedback = general_feedback
+
+ @property
+ def incorrect_feedback(self):
+ """Gets the incorrect_feedback of this ExerciseBase.
+
+ Feedback for failing to complete the exercise successfully. # noqa: E501
+
+ :return: The incorrect_feedback of this ExerciseBase.
+ :rtype: str
+ """
+ return self._incorrect_feedback
+
+ @incorrect_feedback.setter
+ def incorrect_feedback(self, incorrect_feedback):
+ """Sets the incorrect_feedback of this ExerciseBase.
+
+ Feedback for failing to complete the exercise successfully. # noqa: E501
+
+ :param incorrect_feedback: The incorrect_feedback of this ExerciseBase.
+ :type incorrect_feedback: str
+ """
+
+ self._incorrect_feedback = incorrect_feedback
+
+ @property
+ def instructions(self):
+ """Gets the instructions of this ExerciseBase.
+
+ Hints for how to complete the exercise. # noqa: E501
+
+ :return: The instructions of this ExerciseBase.
+ :rtype: str
+ """
+ return self._instructions
+
+ @instructions.setter
+ def instructions(self, instructions):
+ """Sets the instructions of this ExerciseBase.
+
+ Hints for how to complete the exercise. # noqa: E501
+
+ :param instructions: The instructions of this ExerciseBase.
+ :type instructions: str
+ """
+
+ self._instructions = instructions
+
+ @property
+ def partially_correct_feedback(self):
+ """Gets the partially_correct_feedback of this ExerciseBase.
+
+ Feedback for successfully completing certain parts of the exercise. # noqa: E501
+
+ :return: The partially_correct_feedback of this ExerciseBase.
+ :rtype: str
+ """
+ return self._partially_correct_feedback
+
+ @partially_correct_feedback.setter
+ def partially_correct_feedback(self, partially_correct_feedback):
+ """Sets the partially_correct_feedback of this ExerciseBase.
+
+ Feedback for successfully completing certain parts of the exercise. # noqa: E501
+
+ :param partially_correct_feedback: The partially_correct_feedback of this ExerciseBase.
+ :type partially_correct_feedback: str
+ """
+
+ self._partially_correct_feedback = partially_correct_feedback
+
+ @property
+ def search_values(self):
+ """Gets the search_values of this ExerciseBase.
+
+ Search queries that were used to build the exercise. # noqa: E501
+
+ :return: The search_values of this ExerciseBase.
+ :rtype: str
+ """
+ return self._search_values
+
+ @search_values.setter
+ def search_values(self, search_values):
+ """Sets the search_values of this ExerciseBase.
+
+ Search queries that were used to build the exercise. # noqa: E501
+
+ :param search_values: The search_values of this ExerciseBase.
+ :type search_values: str
+ """
+
+ self._search_values = search_values
+
+ @property
+ def work_author(self):
+ """Gets the work_author of this ExerciseBase.
+
+ Name of the person who wrote the base text for the exercise. # noqa: E501
+
+ :return: The work_author of this ExerciseBase.
+ :rtype: str
+ """
+ return self._work_author
+
+ @work_author.setter
+ def work_author(self, work_author):
+ """Sets the work_author of this ExerciseBase.
+
+ Name of the person who wrote the base text for the exercise. # noqa: E501
+
+ :param work_author: The work_author of this ExerciseBase.
+ :type work_author: str
+ """
+
+ self._work_author = work_author
+
+ @property
+ def work_title(self):
+ """Gets the work_title of this ExerciseBase.
+
+ Title of the base text for the exercise. # noqa: E501
+
+ :return: The work_title of this ExerciseBase.
+ :rtype: str
+ """
+ return self._work_title
+
+ @work_title.setter
+ def work_title(self, work_title):
+ """Sets the work_title of this ExerciseBase.
+
+ Title of the base text for the exercise. # noqa: E501
+
+ :param work_title: The work_title of this ExerciseBase.
+ :type work_title: str
+ """
+
+ self._work_title = work_title
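ExerciseBase carries the user-facing fields of an exercise (instructions, feedback strings, search values, work metadata). Judging by the generated class names, the specification composes it with the fields of ExerciseAllOf via allOf into the full Exercise model that this patch also adds. A short construction sketch with illustrative values:

    from openapi.openapi_server.models.exercise_base import ExerciseBase

    base = ExerciseBase(
        instructions="Mark every ablativus absolutus in the passage.",
        work_author="Caesar",
        work_title="De bello Gallico",
    )
    # attribute_map maps Python attribute names to the JSON keys used on the wire;
    # here they are identical, but serialization relies on this mapping.
    print(base.attribute_map["work_title"])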
diff --git a/mc_backend/openapi/openapi_server/models/graph_data.py b/mc_backend/openapi/openapi_server/models/graph_data.py
new file mode 100644
index 0000000..dc97ff8
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/graph_data.py
@@ -0,0 +1,186 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server.models.link import Link
+from openapi.openapi_server.models.node import Node
+from openapi.openapi_server import util
+
+from openapi.openapi_server.models.link import Link # noqa: E501
+from openapi.openapi_server.models.node import Node # noqa: E501
+
+class GraphData(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, directed=None, graph=None, links=None, multigraph=None, nodes=None): # noqa: E501
+ """GraphData - a model defined in OpenAPI
+
+ :param directed: The directed of this GraphData. # noqa: E501
+ :type directed: bool
+ :param graph: The graph of this GraphData. # noqa: E501
+ :type graph: object
+ :param links: The links of this GraphData. # noqa: E501
+ :type links: List[Link]
+ :param multigraph: The multigraph of this GraphData. # noqa: E501
+ :type multigraph: bool
+ :param nodes: The nodes of this GraphData. # noqa: E501
+ :type nodes: List[Node]
+ """
+ self.openapi_types = {
+ 'directed': bool,
+ 'graph': object,
+ 'links': List[Link],
+ 'multigraph': bool,
+ 'nodes': List[Node]
+ }
+
+ self.attribute_map = {
+ 'directed': 'directed',
+ 'graph': 'graph',
+ 'links': 'links',
+ 'multigraph': 'multigraph',
+ 'nodes': 'nodes'
+ }
+
+ self._directed = directed
+ self._graph = graph
+ self._links = links
+ self._multigraph = multigraph
+ self._nodes = nodes
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'GraphData':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The GraphData of this GraphData. # noqa: E501
+ :rtype: GraphData
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def directed(self):
+ """Gets the directed of this GraphData.
+
+ Whether edges in the returned graph are directed. # noqa: E501
+
+ :return: The directed of this GraphData.
+ :rtype: bool
+ """
+ return self._directed
+
+ @directed.setter
+ def directed(self, directed):
+ """Sets the directed of this GraphData.
+
+ Whether edges in the returned graph are directed. # noqa: E501
+
+ :param directed: The directed of this GraphData.
+ :type directed: bool
+ """
+
+ self._directed = directed
+
+ @property
+ def graph(self):
+ """Gets the graph of this GraphData.
+
+ Additional graph data. # noqa: E501
+
+ :return: The graph of this GraphData.
+ :rtype: object
+ """
+ return self._graph
+
+ @graph.setter
+ def graph(self, graph):
+ """Sets the graph of this GraphData.
+
+ Additional graph data. # noqa: E501
+
+ :param graph: The graph of this GraphData.
+ :type graph: object
+ """
+
+ self._graph = graph
+
+ @property
+ def links(self):
+ """Gets the links of this GraphData.
+
+ List of edges for the graph. # noqa: E501
+
+ :return: The links of this GraphData.
+ :rtype: List[Link]
+ """
+ return self._links
+
+ @links.setter
+ def links(self, links):
+ """Sets the links of this GraphData.
+
+ List of edges for the graph. # noqa: E501
+
+ :param links: The links of this GraphData.
+ :type links: List[Link]
+ """
+ if links is None:
+ raise ValueError("Invalid value for `links`, must not be `None`") # noqa: E501
+
+ self._links = links
+
+ @property
+ def multigraph(self):
+ """Gets the multigraph of this GraphData.
+
+ Whether the graph consists of multiple subgraphs. # noqa: E501
+
+ :return: The multigraph of this GraphData.
+ :rtype: bool
+ """
+ return self._multigraph
+
+ @multigraph.setter
+ def multigraph(self, multigraph):
+ """Sets the multigraph of this GraphData.
+
+ Whether the graph consists of multiple subgraphs. # noqa: E501
+
+ :param multigraph: The multigraph of this GraphData.
+ :type multigraph: bool
+ """
+
+ self._multigraph = multigraph
+
+ @property
+ def nodes(self):
+ """Gets the nodes of this GraphData.
+
+ List of nodes for the graph. # noqa: E501
+
+ :return: The nodes of this GraphData.
+ :rtype: List[Node]
+ """
+ return self._nodes
+
+ @nodes.setter
+ def nodes(self, nodes):
+ """Sets the nodes of this GraphData.
+
+ List of nodes for the graph. # noqa: E501
+
+ :param nodes: The nodes of this GraphData.
+ :type nodes: List[Node]
+ """
+ if nodes is None:
+ raise ValueError("Invalid value for `nodes`, must not be `None`") # noqa: E501
+
+ self._nodes = nodes
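GraphData mirrors a node-link graph layout (directed/multigraph flags, a list of nodes, a list of links, plus an open-ended graph object), similar in shape to the JSON produced by networkx.node_link_data. Only links and nodes are required, and their setters reject None. A small sketch, assuming the generated no-argument constructors of Node and Link:

    from openapi.openapi_server.models.graph_data import GraphData
    from openapi.openapi_server.models.link import Link
    from openapi.openapi_server.models.node import Node

    # Hypothetical two-node graph; the Node/Link fields are left at their defaults.
    graph = GraphData(
        directed=True,
        graph={},
        multigraph=False,
        nodes=[Node(), Node()],
        links=[Link()],
    )

    try:
        graph.nodes = None
    except ValueError as err:
        print(err)  # nodes is required, so the setter rejects None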
diff --git a/mc_backend/openapi/openapi_server/models/inline_response200.py b/mc_backend/openapi/openapi_server/models/inline_response200.py
new file mode 100644
index 0000000..180b848
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/inline_response200.py
@@ -0,0 +1,354 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server.models.inline_response200_frequency_analysis import InlineResponse200FrequencyAnalysis
+from openapi.openapi_server.models.inline_response200_text_complexity import InlineResponse200TextComplexity
+from openapi.openapi_server.models.link import Link
+from openapi.openapi_server.models.node import Node
+from openapi.openapi_server.models.solution import Solution
+from openapi.openapi_server import util
+
+from openapi.openapi_server.models.inline_response200_frequency_analysis import InlineResponse200FrequencyAnalysis # noqa: E501
+from openapi.openapi_server.models.inline_response200_text_complexity import InlineResponse200TextComplexity # noqa: E501
+from openapi.openapi_server.models.link import Link # noqa: E501
+from openapi.openapi_server.models.node import Node # noqa: E501
+from openapi.openapi_server.models.solution import Solution # noqa: E501
+
+class InlineResponse200(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, directed=None, exercise_id=None, exercise_type=None, frequency_analysis=None, graph=None, links=None, multigraph=None, nodes=None, solutions=None, text_complexity=None, uri=None): # noqa: E501
+ """InlineResponse200 - a model defined in OpenAPI
+
+ :param directed: The directed of this InlineResponse200. # noqa: E501
+ :type directed: bool
+ :param exercise_id: The exercise_id of this InlineResponse200. # noqa: E501
+ :type exercise_id: str
+ :param exercise_type: The exercise_type of this InlineResponse200. # noqa: E501
+ :type exercise_type: str
+ :param frequency_analysis: The frequency_analysis of this InlineResponse200. # noqa: E501
+ :type frequency_analysis: List[InlineResponse200FrequencyAnalysis]
+ :param graph: The graph of this InlineResponse200. # noqa: E501
+ :type graph: object
+ :param links: The links of this InlineResponse200. # noqa: E501
+ :type links: List[Link]
+ :param multigraph: The multigraph of this InlineResponse200. # noqa: E501
+ :type multigraph: bool
+ :param nodes: The nodes of this InlineResponse200. # noqa: E501
+ :type nodes: List[Node]
+ :param solutions: The solutions of this InlineResponse200. # noqa: E501
+ :type solutions: List[Solution]
+ :param text_complexity: The text_complexity of this InlineResponse200. # noqa: E501
+ :type text_complexity: InlineResponse200TextComplexity
+ :param uri: The uri of this InlineResponse200. # noqa: E501
+ :type uri: str
+ """
+ self.openapi_types = {
+ 'directed': bool,
+ 'exercise_id': str,
+ 'exercise_type': str,
+ 'frequency_analysis': List[InlineResponse200FrequencyAnalysis],
+ 'graph': object,
+ 'links': List[Link],
+ 'multigraph': bool,
+ 'nodes': List[Node],
+ 'solutions': List[Solution],
+ 'text_complexity': InlineResponse200TextComplexity,
+ 'uri': str
+ }
+
+ self.attribute_map = {
+ 'directed': 'directed',
+ 'exercise_id': 'exercise_id',
+ 'exercise_type': 'exercise_type',
+ 'frequency_analysis': 'frequency_analysis',
+ 'graph': 'graph',
+ 'links': 'links',
+ 'multigraph': 'multigraph',
+ 'nodes': 'nodes',
+ 'solutions': 'solutions',
+ 'text_complexity': 'text_complexity',
+ 'uri': 'uri'
+ }
+
+ self._directed = directed
+ self._exercise_id = exercise_id
+ self._exercise_type = exercise_type
+ self._frequency_analysis = frequency_analysis
+ self._graph = graph
+ self._links = links
+ self._multigraph = multigraph
+ self._nodes = nodes
+ self._solutions = solutions
+ self._text_complexity = text_complexity
+ self._uri = uri
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'InlineResponse200':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The inline_response_200 of this InlineResponse200. # noqa: E501
+ :rtype: InlineResponse200
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def directed(self):
+ """Gets the directed of this InlineResponse200.
+
+ Whether edges in the returned graph are directed. # noqa: E501
+
+ :return: The directed of this InlineResponse200.
+ :rtype: bool
+ """
+ return self._directed
+
+ @directed.setter
+ def directed(self, directed):
+ """Sets the directed of this InlineResponse200.
+
+ Whether edges in the returned graph are directed. # noqa: E501
+
+ :param directed: The directed of this InlineResponse200.
+ :type directed: bool
+ """
+
+ self._directed = directed
+
+ @property
+ def exercise_id(self):
+ """Gets the exercise_id of this InlineResponse200.
+
+ Unique identifier (UUID) for the exercise. # noqa: E501
+
+ :return: The exercise_id of this InlineResponse200.
+ :rtype: str
+ """
+ return self._exercise_id
+
+ @exercise_id.setter
+ def exercise_id(self, exercise_id):
+ """Sets the exercise_id of this InlineResponse200.
+
+ Unique identifier (UUID) for the exercise. # noqa: E501
+
+ :param exercise_id: The exercise_id of this InlineResponse200.
+ :type exercise_id: str
+ """
+
+ self._exercise_id = exercise_id
+
+ @property
+ def exercise_type(self):
+ """Gets the exercise_type of this InlineResponse200.
+
+ Type of exercise, concerning interaction and layout. # noqa: E501
+
+ :return: The exercise_type of this InlineResponse200.
+ :rtype: str
+ """
+ return self._exercise_type
+
+ @exercise_type.setter
+ def exercise_type(self, exercise_type):
+ """Sets the exercise_type of this InlineResponse200.
+
+ Type of exercise, concerning interaction and layout. # noqa: E501
+
+ :param exercise_type: The exercise_type of this InlineResponse200.
+ :type exercise_type: str
+ """
+
+ self._exercise_type = exercise_type
+
+ @property
+ def frequency_analysis(self):
+ """Gets the frequency_analysis of this InlineResponse200.
+
+ List of items with frequency data for linguistic phenomena. # noqa: E501
+
+ :return: The frequency_analysis of this InlineResponse200.
+ :rtype: List[InlineResponse200FrequencyAnalysis]
+ """
+ return self._frequency_analysis
+
+ @frequency_analysis.setter
+ def frequency_analysis(self, frequency_analysis):
+ """Sets the frequency_analysis of this InlineResponse200.
+
+ List of items with frequency data for linguistic phenomena. # noqa: E501
+
+ :param frequency_analysis: The frequency_analysis of this InlineResponse200.
+ :type frequency_analysis: List[InlineResponse200FrequencyAnalysis]
+ """
+
+ self._frequency_analysis = frequency_analysis
+
+ @property
+ def graph(self):
+ """Gets the graph of this InlineResponse200.
+
+ Additional graph data. # noqa: E501
+
+ :return: The graph of this InlineResponse200.
+ :rtype: object
+ """
+ return self._graph
+
+ @graph.setter
+ def graph(self, graph):
+ """Sets the graph of this InlineResponse200.
+
+ Additional graph data. # noqa: E501
+
+ :param graph: The graph of this InlineResponse200.
+ :type graph: object
+ """
+
+ self._graph = graph
+
+ @property
+ def links(self):
+ """Gets the links of this InlineResponse200.
+
+ List of edges for the graph. # noqa: E501
+
+ :return: The links of this InlineResponse200.
+ :rtype: List[Link]
+ """
+ return self._links
+
+ @links.setter
+ def links(self, links):
+ """Sets the links of this InlineResponse200.
+
+ List of edges for the graph. # noqa: E501
+
+ :param links: The links of this InlineResponse200.
+ :type links: List[Link]
+ """
+
+ self._links = links
+
+ @property
+ def multigraph(self):
+ """Gets the multigraph of this InlineResponse200.
+
+ Whether the graph consists of multiple subgraphs. # noqa: E501
+
+ :return: The multigraph of this InlineResponse200.
+ :rtype: bool
+ """
+ return self._multigraph
+
+ @multigraph.setter
+ def multigraph(self, multigraph):
+ """Sets the multigraph of this InlineResponse200.
+
+ Whether the graph consists of multiple subgraphs. # noqa: E501
+
+ :param multigraph: The multigraph of this InlineResponse200.
+ :type multigraph: bool
+ """
+
+ self._multigraph = multigraph
+
+ @property
+ def nodes(self):
+ """Gets the nodes of this InlineResponse200.
+
+ List of nodes for the graph. # noqa: E501
+
+ :return: The nodes of this InlineResponse200.
+ :rtype: List[Node]
+ """
+ return self._nodes
+
+ @nodes.setter
+ def nodes(self, nodes):
+ """Sets the nodes of this InlineResponse200.
+
+ List of nodes for the graph. # noqa: E501
+
+ :param nodes: The nodes of this InlineResponse200.
+ :type nodes: List[Node]
+ """
+
+ self._nodes = nodes
+
+ @property
+ def solutions(self):
+ """Gets the solutions of this InlineResponse200.
+
+ Correct solutions for this exercise. # noqa: E501
+
+ :return: The solutions of this InlineResponse200.
+ :rtype: List[Solution]
+ """
+ return self._solutions
+
+ @solutions.setter
+ def solutions(self, solutions):
+ """Sets the solutions of this InlineResponse200.
+
+ Correct solutions for this exercise. # noqa: E501
+
+ :param solutions: The solutions of this InlineResponse200.
+ :type solutions: List[Solution]
+ """
+
+ self._solutions = solutions
+
+ @property
+ def text_complexity(self):
+ """Gets the text_complexity of this InlineResponse200.
+
+
+ :return: The text_complexity of this InlineResponse200.
+ :rtype: InlineResponse200TextComplexity
+ """
+ return self._text_complexity
+
+ @text_complexity.setter
+ def text_complexity(self, text_complexity):
+ """Sets the text_complexity of this InlineResponse200.
+
+
+ :param text_complexity: The text_complexity of this InlineResponse200.
+ :type text_complexity: InlineResponse200TextComplexity
+ """
+
+ self._text_complexity = text_complexity
+
+ @property
+ def uri(self):
+ """Gets the uri of this InlineResponse200.
+
+ URI for accessing the exercise in this API. # noqa: E501
+
+ :return: The uri of this InlineResponse200.
+ :rtype: str
+ """
+ return self._uri
+
+ @uri.setter
+ def uri(self, uri):
+ """Sets the uri of this InlineResponse200.
+
+ URI for accessing the exercise in this API. # noqa: E501
+
+ :param uri: The uri of this InlineResponse200.
+ :type uri: str
+ """
+
+ self._uri = uri
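Because openapi_types references other generated classes, util.deserialize_model rebuilds nested structures recursively: a dict under text_complexity becomes an InlineResponse200TextComplexity instance, and each entry under solutions becomes a Solution. A sketch with a made-up payload that sets only a subset of fields:

    from openapi.openapi_server.models.inline_response200 import InlineResponse200

    body = {
        "exercise_id": "8f4e2c1a-0000-4000-8000-000000000001",
        "exercise_type": "markWords",
        "directed": True,
        "text_complexity": {"all": 42.3, "n_w": 150, "n_sent": 9},
    }
    response = InlineResponse200.from_dict(body)
    print(type(response.text_complexity).__name__)  # InlineResponse200TextComplexity
    print(response.text_complexity.n_w)              # 150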
diff --git a/mc_backend/openapi/openapi_server/models/inline_response200_frequency_analysis.py b/mc_backend/openapi/openapi_server/models/inline_response200_frequency_analysis.py
new file mode 100644
index 0000000..8a5e967
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/inline_response200_frequency_analysis.py
@@ -0,0 +1,122 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class InlineResponse200FrequencyAnalysis(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, count=None, phenomena=None, values=None): # noqa: E501
+ """InlineResponse200FrequencyAnalysis - a model defined in OpenAPI
+
+ :param count: The count of this InlineResponse200FrequencyAnalysis. # noqa: E501
+ :type count: int
+ :param phenomena: The phenomena of this InlineResponse200FrequencyAnalysis. # noqa: E501
+ :type phenomena: List[str]
+ :param values: The values of this InlineResponse200FrequencyAnalysis. # noqa: E501
+ :type values: List[str]
+ """
+ self.openapi_types = {
+ 'count': int,
+ 'phenomena': List[str],
+ 'values': List[str]
+ }
+
+ self.attribute_map = {
+ 'count': 'count',
+ 'phenomena': 'phenomena',
+ 'values': 'values'
+ }
+
+ self._count = count
+ self._phenomena = phenomena
+ self._values = values
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'InlineResponse200FrequencyAnalysis':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The inline_response_200_frequency_analysis of this InlineResponse200FrequencyAnalysis. # noqa: E501
+ :rtype: InlineResponse200FrequencyAnalysis
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def count(self):
+ """Gets the count of this InlineResponse200FrequencyAnalysis.
+
+ How often the given combination of values occurred. # noqa: E501
+
+ :return: The count of this InlineResponse200FrequencyAnalysis.
+ :rtype: int
+ """
+ return self._count
+
+ @count.setter
+ def count(self, count):
+ """Sets the count of this InlineResponse200FrequencyAnalysis.
+
+ How often the given combination of values occurred. # noqa: E501
+
+ :param count: The count of this InlineResponse200FrequencyAnalysis.
+ :type count: int
+ """
+
+ self._count = count
+
+ @property
+ def phenomena(self):
+ """Gets the phenomena of this InlineResponse200FrequencyAnalysis.
+
+ Labels for the phenomena described in this frequency entry. # noqa: E501
+
+ :return: The phenomena of this InlineResponse200FrequencyAnalysis.
+ :rtype: List[str]
+ """
+ return self._phenomena
+
+ @phenomena.setter
+ def phenomena(self, phenomena):
+ """Sets the phenomena of this InlineResponse200FrequencyAnalysis.
+
+ Labels for the phenomena described in this frequency entry. # noqa: E501
+
+ :param phenomena: The phenomena of this InlineResponse200FrequencyAnalysis.
+ :type phenomena: List[str]
+ """
+
+ self._phenomena = phenomena
+
+ @property
+ def values(self):
+ """Gets the values of this InlineResponse200FrequencyAnalysis.
+
+ Values for the phenomena described in this frequency entry. # noqa: E501
+
+ :return: The values of this InlineResponse200FrequencyAnalysis.
+ :rtype: List[str]
+ """
+ return self._values
+
+ @values.setter
+ def values(self, values):
+ """Sets the values of this InlineResponse200FrequencyAnalysis.
+
+ Values for the phenomena described in this frequency entry. # noqa: E501
+
+ :param values: The values of this InlineResponse200FrequencyAnalysis.
+ :type values: List[str]
+ """
+
+ self._values = values
diff --git a/mc_backend/openapi/openapi_server/models/inline_response200_text_complexity.py b/mc_backend/openapi/openapi_server/models/inline_response200_text_complexity.py
new file mode 100644
index 0000000..fdc9c0f
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/inline_response200_text_complexity.py
@@ -0,0 +1,462 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class InlineResponse200TextComplexity(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, all=None, avg_w_len=None, avg_w_per_sent=None, lex_den=None, n_abl_abs=None, n_clause=None, n_gerund=None, n_inf=None, n_part=None, n_punct=None, n_sent=None, n_subclause=None, n_types=None, n_w=None, pos=None): # noqa: E501
+ """InlineResponse200TextComplexity - a model defined in OpenAPI
+
+ :param all: The all of this InlineResponse200TextComplexity. # noqa: E501
+ :type all: float
+ :param avg_w_len: The avg_w_len of this InlineResponse200TextComplexity. # noqa: E501
+ :type avg_w_len: float
+ :param avg_w_per_sent: The avg_w_per_sent of this InlineResponse200TextComplexity. # noqa: E501
+ :type avg_w_per_sent: float
+ :param lex_den: The lex_den of this InlineResponse200TextComplexity. # noqa: E501
+ :type lex_den: float
+ :param n_abl_abs: The n_abl_abs of this InlineResponse200TextComplexity. # noqa: E501
+ :type n_abl_abs: int
+ :param n_clause: The n_clause of this InlineResponse200TextComplexity. # noqa: E501
+ :type n_clause: int
+ :param n_gerund: The n_gerund of this InlineResponse200TextComplexity. # noqa: E501
+ :type n_gerund: int
+ :param n_inf: The n_inf of this InlineResponse200TextComplexity. # noqa: E501
+ :type n_inf: int
+ :param n_part: The n_part of this InlineResponse200TextComplexity. # noqa: E501
+ :type n_part: int
+ :param n_punct: The n_punct of this InlineResponse200TextComplexity. # noqa: E501
+ :type n_punct: int
+ :param n_sent: The n_sent of this InlineResponse200TextComplexity. # noqa: E501
+ :type n_sent: int
+ :param n_subclause: The n_subclause of this InlineResponse200TextComplexity. # noqa: E501
+ :type n_subclause: int
+ :param n_types: The n_types of this InlineResponse200TextComplexity. # noqa: E501
+ :type n_types: int
+ :param n_w: The n_w of this InlineResponse200TextComplexity. # noqa: E501
+ :type n_w: int
+ :param pos: The pos of this InlineResponse200TextComplexity. # noqa: E501
+ :type pos: int
+ """
+ self.openapi_types = {
+ 'all': float,
+ 'avg_w_len': float,
+ 'avg_w_per_sent': float,
+ 'lex_den': float,
+ 'n_abl_abs': int,
+ 'n_clause': int,
+ 'n_gerund': int,
+ 'n_inf': int,
+ 'n_part': int,
+ 'n_punct': int,
+ 'n_sent': int,
+ 'n_subclause': int,
+ 'n_types': int,
+ 'n_w': int,
+ 'pos': int
+ }
+
+ self.attribute_map = {
+ 'all': 'all',
+ 'avg_w_len': 'avg_w_len',
+ 'avg_w_per_sent': 'avg_w_per_sent',
+ 'lex_den': 'lex_den',
+ 'n_abl_abs': 'n_abl_abs',
+ 'n_clause': 'n_clause',
+ 'n_gerund': 'n_gerund',
+ 'n_inf': 'n_inf',
+ 'n_part': 'n_part',
+ 'n_punct': 'n_punct',
+ 'n_sent': 'n_sent',
+ 'n_subclause': 'n_subclause',
+ 'n_types': 'n_types',
+ 'n_w': 'n_w',
+ 'pos': 'pos'
+ }
+
+ self._all = all
+ self._avg_w_len = avg_w_len
+ self._avg_w_per_sent = avg_w_per_sent
+ self._lex_den = lex_den
+ self._n_abl_abs = n_abl_abs
+ self._n_clause = n_clause
+ self._n_gerund = n_gerund
+ self._n_inf = n_inf
+ self._n_part = n_part
+ self._n_punct = n_punct
+ self._n_sent = n_sent
+ self._n_subclause = n_subclause
+ self._n_types = n_types
+ self._n_w = n_w
+ self._pos = pos
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'InlineResponse200TextComplexity':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The inline_response_200_text_complexity of this InlineResponse200TextComplexity. # noqa: E501
+ :rtype: InlineResponse200TextComplexity
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def all(self):
+ """Gets the all of this InlineResponse200TextComplexity.
+
+ Overall text complexity of the given corpus. # noqa: E501
+
+ :return: The all of this InlineResponse200TextComplexity.
+ :rtype: float
+ """
+ return self._all
+
+ @all.setter
+ def all(self, all):
+ """Sets the all of this InlineResponse200TextComplexity.
+
+ Overall text complexity of the given corpus. # noqa: E501
+
+ :param all: The all of this InlineResponse200TextComplexity.
+ :type all: float
+ """
+
+ self._all = all
+
+ @property
+ def avg_w_len(self):
+ """Gets the avg_w_len of this InlineResponse200TextComplexity.
+
+ Average length of a word in the given corpus. # noqa: E501
+
+ :return: The avg_w_len of this InlineResponse200TextComplexity.
+ :rtype: float
+ """
+ return self._avg_w_len
+
+ @avg_w_len.setter
+ def avg_w_len(self, avg_w_len):
+ """Sets the avg_w_len of this InlineResponse200TextComplexity.
+
+ Average length of a word in the given corpus. # noqa: E501
+
+ :param avg_w_len: The avg_w_len of this InlineResponse200TextComplexity.
+ :type avg_w_len: float
+ """
+
+ self._avg_w_len = avg_w_len
+
+ @property
+ def avg_w_per_sent(self):
+ """Gets the avg_w_per_sent of this InlineResponse200TextComplexity.
+
+ Average number of words per sentence. # noqa: E501
+
+ :return: The avg_w_per_sent of this InlineResponse200TextComplexity.
+ :rtype: float
+ """
+ return self._avg_w_per_sent
+
+ @avg_w_per_sent.setter
+ def avg_w_per_sent(self, avg_w_per_sent):
+ """Sets the avg_w_per_sent of this InlineResponse200TextComplexity.
+
+ Average number of words per sentence. # noqa: E501
+
+ :param avg_w_per_sent: The avg_w_per_sent of this InlineResponse200TextComplexity.
+ :type avg_w_per_sent: float
+ """
+
+ self._avg_w_per_sent = avg_w_per_sent
+
+ @property
+ def lex_den(self):
+ """Gets the lex_den of this InlineResponse200TextComplexity.
+
+ Lexical density of the given corpus. # noqa: E501
+
+ :return: The lex_den of this InlineResponse200TextComplexity.
+ :rtype: float
+ """
+ return self._lex_den
+
+ @lex_den.setter
+ def lex_den(self, lex_den):
+ """Sets the lex_den of this InlineResponse200TextComplexity.
+
+ Lexical density of the given corpus. # noqa: E501
+
+ :param lex_den: The lex_den of this InlineResponse200TextComplexity.
+ :type lex_den: float
+ """
+ if lex_den is not None and lex_den > 1: # noqa: E501
+ raise ValueError("Invalid value for `lex_den`, must be a value less than or equal to `1`") # noqa: E501
+ if lex_den is not None and lex_den < 0: # noqa: E501
+ raise ValueError("Invalid value for `lex_den`, must be a value greater than or equal to `0`") # noqa: E501
+
+ self._lex_den = lex_den
+
+ @property
+ def n_abl_abs(self):
+ """Gets the n_abl_abs of this InlineResponse200TextComplexity.
+
+ Number of ablativi absoluti in the given corpus. # noqa: E501
+
+ :return: The n_abl_abs of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._n_abl_abs
+
+ @n_abl_abs.setter
+ def n_abl_abs(self, n_abl_abs):
+ """Sets the n_abl_abs of this InlineResponse200TextComplexity.
+
+ Number of ablativi absoluti in the given corpus. # noqa: E501
+
+ :param n_abl_abs: The n_abl_abs of this InlineResponse200TextComplexity.
+ :type n_abl_abs: int
+ """
+
+ self._n_abl_abs = n_abl_abs
+
+ @property
+ def n_clause(self):
+ """Gets the n_clause of this InlineResponse200TextComplexity.
+
+ Number of clauses in the given corpus. # noqa: E501
+
+ :return: The n_clause of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._n_clause
+
+ @n_clause.setter
+ def n_clause(self, n_clause):
+ """Sets the n_clause of this InlineResponse200TextComplexity.
+
+ Number of clauses in the given corpus. # noqa: E501
+
+ :param n_clause: The n_clause of this InlineResponse200TextComplexity.
+ :type n_clause: int
+ """
+
+ self._n_clause = n_clause
+
+ @property
+ def n_gerund(self):
+ """Gets the n_gerund of this InlineResponse200TextComplexity.
+
+ Number of gerunds in the given corpus. # noqa: E501
+
+ :return: The n_gerund of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._n_gerund
+
+ @n_gerund.setter
+ def n_gerund(self, n_gerund):
+ """Sets the n_gerund of this InlineResponse200TextComplexity.
+
+ Number of gerunds in the given corpus. # noqa: E501
+
+ :param n_gerund: The n_gerund of this InlineResponse200TextComplexity.
+ :type n_gerund: int
+ """
+
+ self._n_gerund = n_gerund
+
+ @property
+ def n_inf(self):
+ """Gets the n_inf of this InlineResponse200TextComplexity.
+
+ Number of infinitives in the given corpus. # noqa: E501
+
+ :return: The n_inf of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._n_inf
+
+ @n_inf.setter
+ def n_inf(self, n_inf):
+ """Sets the n_inf of this InlineResponse200TextComplexity.
+
+ Number of infinitives in the given corpus. # noqa: E501
+
+ :param n_inf: The n_inf of this InlineResponse200TextComplexity.
+ :type n_inf: int
+ """
+
+ self._n_inf = n_inf
+
+ @property
+ def n_part(self):
+ """Gets the n_part of this InlineResponse200TextComplexity.
+
+ Number of participles in the given corpus. # noqa: E501
+
+ :return: The n_part of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._n_part
+
+ @n_part.setter
+ def n_part(self, n_part):
+ """Sets the n_part of this InlineResponse200TextComplexity.
+
+ Number of participles in the given corpus. # noqa: E501
+
+ :param n_part: The n_part of this InlineResponse200TextComplexity.
+ :type n_part: int
+ """
+
+ self._n_part = n_part
+
+ @property
+ def n_punct(self):
+ """Gets the n_punct of this InlineResponse200TextComplexity.
+
+ Number of punctuation signs in the given corpus. # noqa: E501
+
+ :return: The n_punct of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._n_punct
+
+ @n_punct.setter
+ def n_punct(self, n_punct):
+ """Sets the n_punct of this InlineResponse200TextComplexity.
+
+ Number of punctuation signs in the given corpus. # noqa: E501
+
+ :param n_punct: The n_punct of this InlineResponse200TextComplexity.
+ :type n_punct: int
+ """
+
+ self._n_punct = n_punct
+
+ @property
+ def n_sent(self):
+ """Gets the n_sent of this InlineResponse200TextComplexity.
+
+ Number of sentences in the given corpus. # noqa: E501
+
+ :return: The n_sent of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._n_sent
+
+ @n_sent.setter
+ def n_sent(self, n_sent):
+ """Sets the n_sent of this InlineResponse200TextComplexity.
+
+ Number of sentences in the given corpus. # noqa: E501
+
+ :param n_sent: The n_sent of this InlineResponse200TextComplexity.
+ :type n_sent: int
+ """
+
+ self._n_sent = n_sent
+
+ @property
+ def n_subclause(self):
+ """Gets the n_subclause of this InlineResponse200TextComplexity.
+
+ Number of subclauses in the given corpus. # noqa: E501
+
+ :return: The n_subclause of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._n_subclause
+
+ @n_subclause.setter
+ def n_subclause(self, n_subclause):
+ """Sets the n_subclause of this InlineResponse200TextComplexity.
+
+ Number of subclauses in the given corpus. # noqa: E501
+
+ :param n_subclause: The n_subclause of this InlineResponse200TextComplexity.
+ :type n_subclause: int
+ """
+
+ self._n_subclause = n_subclause
+
+ @property
+ def n_types(self):
+ """Gets the n_types of this InlineResponse200TextComplexity.
+
+ Number of distinct word forms in the given corpus. # noqa: E501
+
+ :return: The n_types of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._n_types
+
+ @n_types.setter
+ def n_types(self, n_types):
+ """Sets the n_types of this InlineResponse200TextComplexity.
+
+ Number of distinct word forms in the given corpus. # noqa: E501
+
+ :param n_types: The n_types of this InlineResponse200TextComplexity.
+ :type n_types: int
+ """
+
+ self._n_types = n_types
+
+ @property
+ def n_w(self):
+ """Gets the n_w of this InlineResponse200TextComplexity.
+
+ Number of words in the given corpus. # noqa: E501
+
+ :return: The n_w of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._n_w
+
+ @n_w.setter
+ def n_w(self, n_w):
+ """Sets the n_w of this InlineResponse200TextComplexity.
+
+ Number of words in the given corpus. # noqa: E501
+
+ :param n_w: The n_w of this InlineResponse200TextComplexity.
+ :type n_w: int
+ """
+
+ self._n_w = n_w
+
+ @property
+ def pos(self):
+ """Gets the pos of this InlineResponse200TextComplexity.
+
+ Number of distinct part of speech tags in the given corpus. # noqa: E501
+
+ :return: The pos of this InlineResponse200TextComplexity.
+ :rtype: int
+ """
+ return self._pos
+
+ @pos.setter
+ def pos(self, pos):
+ """Sets the pos of this InlineResponse200TextComplexity.
+
+ Number of distinct part of speech tags in the given corpus. # noqa: E501
+
+ :param pos: The pos of this InlineResponse200TextComplexity.
+ :type pos: int
+ """
+
+ self._pos = pos
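Most of the complexity measures are unconstrained counts or averages; lex_den is the exception, with a documented 0..1 range that the generated setter enforces. Note that the constructor assigns the private attributes directly, so only assignments through the property go through this check. A brief sketch:

    from openapi.openapi_server.models.inline_response200_text_complexity import (
        InlineResponse200TextComplexity,
    )

    tc = InlineResponse200TextComplexity(n_w=120, n_sent=8, avg_w_per_sent=15.0)
    tc.lex_den = 0.55      # within the allowed range, accepted
    try:
        tc.lex_den = 1.7   # outside 0..1, rejected by the setter
    except ValueError as err:
        print(err)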
diff --git a/mc_backend/openapi/openapi_server/models/learning_result.py b/mc_backend/openapi/openapi_server/models/learning_result.py
new file mode 100644
index 0000000..14aed0a
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/learning_result.py
@@ -0,0 +1,672 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class LearningResult(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, actor_account_name='', actor_object_type='', category_id='', category_object_type='', choices='[]', completion=None, correct_responses_pattern=None, created_time=None, duration='PT0S', extensions='{}', interaction_type='', object_definition_description=None, object_definition_type='', object_object_type='', response=None, score_max=None, score_min=None, score_raw=None, score_scaled=0, success=None, verb_display='', verb_id=''): # noqa: E501
+ """LearningResult - a model defined in OpenAPI
+
+ :param actor_account_name: The actor_account_name of this LearningResult. # noqa: E501
+ :type actor_account_name: str
+ :param actor_object_type: The actor_object_type of this LearningResult. # noqa: E501
+ :type actor_object_type: str
+ :param category_id: The category_id of this LearningResult. # noqa: E501
+ :type category_id: str
+ :param category_object_type: The category_object_type of this LearningResult. # noqa: E501
+ :type category_object_type: str
+ :param choices: The choices of this LearningResult. # noqa: E501
+ :type choices: str
+ :param completion: The completion of this LearningResult. # noqa: E501
+ :type completion: bool
+ :param correct_responses_pattern: The correct_responses_pattern of this LearningResult. # noqa: E501
+ :type correct_responses_pattern: str
+ :param created_time: The created_time of this LearningResult. # noqa: E501
+ :type created_time: float
+ :param duration: The duration of this LearningResult. # noqa: E501
+ :type duration: str
+ :param extensions: The extensions of this LearningResult. # noqa: E501
+ :type extensions: str
+ :param interaction_type: The interaction_type of this LearningResult. # noqa: E501
+ :type interaction_type: str
+ :param object_definition_description: The object_definition_description of this LearningResult. # noqa: E501
+ :type object_definition_description: str
+ :param object_definition_type: The object_definition_type of this LearningResult. # noqa: E501
+ :type object_definition_type: str
+ :param object_object_type: The object_object_type of this LearningResult. # noqa: E501
+ :type object_object_type: str
+ :param response: The response of this LearningResult. # noqa: E501
+ :type response: str
+ :param score_max: The score_max of this LearningResult. # noqa: E501
+ :type score_max: int
+ :param score_min: The score_min of this LearningResult. # noqa: E501
+ :type score_min: int
+ :param score_raw: The score_raw of this LearningResult. # noqa: E501
+ :type score_raw: int
+ :param score_scaled: The score_scaled of this LearningResult. # noqa: E501
+ :type score_scaled: float
+ :param success: The success of this LearningResult. # noqa: E501
+ :type success: bool
+ :param verb_display: The verb_display of this LearningResult. # noqa: E501
+ :type verb_display: str
+ :param verb_id: The verb_id of this LearningResult. # noqa: E501
+ :type verb_id: str
+ """
+ self.openapi_types = {
+ 'actor_account_name': str,
+ 'actor_object_type': str,
+ 'category_id': str,
+ 'category_object_type': str,
+ 'choices': str,
+ 'completion': bool,
+ 'correct_responses_pattern': str,
+ 'created_time': float,
+ 'duration': str,
+ 'extensions': str,
+ 'interaction_type': str,
+ 'object_definition_description': str,
+ 'object_definition_type': str,
+ 'object_object_type': str,
+ 'response': str,
+ 'score_max': int,
+ 'score_min': int,
+ 'score_raw': int,
+ 'score_scaled': float,
+ 'success': bool,
+ 'verb_display': str,
+ 'verb_id': str
+ }
+
+ self.attribute_map = {
+ 'actor_account_name': 'actor_account_name',
+ 'actor_object_type': 'actor_object_type',
+ 'category_id': 'category_id',
+ 'category_object_type': 'category_object_type',
+ 'choices': 'choices',
+ 'completion': 'completion',
+ 'correct_responses_pattern': 'correct_responses_pattern',
+ 'created_time': 'created_time',
+ 'duration': 'duration',
+ 'extensions': 'extensions',
+ 'interaction_type': 'interaction_type',
+ 'object_definition_description': 'object_definition_description',
+ 'object_definition_type': 'object_definition_type',
+ 'object_object_type': 'object_object_type',
+ 'response': 'response',
+ 'score_max': 'score_max',
+ 'score_min': 'score_min',
+ 'score_raw': 'score_raw',
+ 'score_scaled': 'score_scaled',
+ 'success': 'success',
+ 'verb_display': 'verb_display',
+ 'verb_id': 'verb_id'
+ }
+
+ self._actor_account_name = actor_account_name
+ self._actor_object_type = actor_object_type
+ self._category_id = category_id
+ self._category_object_type = category_object_type
+ self._choices = choices
+ self._completion = completion
+ self._correct_responses_pattern = correct_responses_pattern
+ self._created_time = created_time
+ self._duration = duration
+ self._extensions = extensions
+ self._interaction_type = interaction_type
+ self._object_definition_description = object_definition_description
+ self._object_definition_type = object_definition_type
+ self._object_object_type = object_object_type
+ self._response = response
+ self._score_max = score_max
+ self._score_min = score_min
+ self._score_raw = score_raw
+ self._score_scaled = score_scaled
+ self._success = success
+ self._verb_display = verb_display
+ self._verb_id = verb_id
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'LearningResult':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The LearningResult of this LearningResult. # noqa: E501
+ :rtype: LearningResult
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def actor_account_name(self):
+ """Gets the actor_account_name of this LearningResult.
+
+ H5P user ID, usually unique per device. # noqa: E501
+
+ :return: The actor_account_name of this LearningResult.
+ :rtype: str
+ """
+ return self._actor_account_name
+
+ @actor_account_name.setter
+ def actor_account_name(self, actor_account_name):
+ """Sets the actor_account_name of this LearningResult.
+
+ H5P user ID, usually unique per device. # noqa: E501
+
+ :param actor_account_name: The actor_account_name of this LearningResult.
+ :type actor_account_name: str
+ """
+
+ self._actor_account_name = actor_account_name
+
+ @property
+ def actor_object_type(self):
+ """Gets the actor_object_type of this LearningResult.
+
+ Describes the kind of object that was recognized as actor. # noqa: E501
+
+ :return: The actor_object_type of this LearningResult.
+ :rtype: str
+ """
+ return self._actor_object_type
+
+ @actor_object_type.setter
+ def actor_object_type(self, actor_object_type):
+ """Sets the actor_object_type of this LearningResult.
+
+ Describes the kind of object that was recognized as actor. # noqa: E501
+
+ :param actor_object_type: The actor_object_type of this LearningResult.
+ :type actor_object_type: str
+ """
+
+ self._actor_object_type = actor_object_type
+
+ @property
+ def category_id(self):
+ """Gets the category_id of this LearningResult.
+
+ Link to the exercise type specification. # noqa: E501
+
+ :return: The category_id of this LearningResult.
+ :rtype: str
+ """
+ return self._category_id
+
+ @category_id.setter
+ def category_id(self, category_id):
+ """Sets the category_id of this LearningResult.
+
+ Link to the exercise type specification. # noqa: E501
+
+ :param category_id: The category_id of this LearningResult.
+ :type category_id: str
+ """
+
+ self._category_id = category_id
+
+ @property
+ def category_object_type(self):
+ """Gets the category_object_type of this LearningResult.
+
+ Describes the kind of object that was recognized as exercise. # noqa: E501
+
+ :return: The category_object_type of this LearningResult.
+ :rtype: str
+ """
+ return self._category_object_type
+
+ @category_object_type.setter
+ def category_object_type(self, category_object_type):
+ """Sets the category_object_type of this LearningResult.
+
+ Describes the kind of object that was recognized as exercise. # noqa: E501
+
+ :param category_object_type: The category_object_type of this LearningResult.
+ :type category_object_type: str
+ """
+
+ self._category_object_type = category_object_type
+
+ @property
+ def choices(self):
+ """Gets the choices of this LearningResult.
+
+ JSON string containing a list of possible choices, each with ID and description. # noqa: E501
+
+ :return: The choices of this LearningResult.
+ :rtype: str
+ """
+ return self._choices
+
+ @choices.setter
+ def choices(self, choices):
+ """Sets the choices of this LearningResult.
+
+ JSON string containing a list of possible choices, each with ID and description. # noqa: E501
+
+ :param choices: The choices of this LearningResult.
+ :type choices: str
+ """
+
+ self._choices = choices
+
+ @property
+ def completion(self):
+ """Gets the completion of this LearningResult.
+
+ Whether the exercise was fully processed or not. # noqa: E501
+
+ :return: The completion of this LearningResult.
+ :rtype: bool
+ """
+ return self._completion
+
+ @completion.setter
+ def completion(self, completion):
+ """Sets the completion of this LearningResult.
+
+ Whether the exercise was fully processed or not. # noqa: E501
+
+ :param completion: The completion of this LearningResult.
+ :type completion: bool
+ """
+ if completion is None:
+ raise ValueError("Invalid value for `completion`, must not be `None`") # noqa: E501
+
+ self._completion = completion
+
+ @property
+ def correct_responses_pattern(self):
+ """Gets the correct_responses_pattern of this LearningResult.
+
+ JSON string containing a list of possible solutions to the exercise, given as patterns of answers. # noqa: E501
+
+ :return: The correct_responses_pattern of this LearningResult.
+ :rtype: str
+ """
+ return self._correct_responses_pattern
+
+ @correct_responses_pattern.setter
+ def correct_responses_pattern(self, correct_responses_pattern):
+ """Sets the correct_responses_pattern of this LearningResult.
+
+ JSON string containing a list of possible solutions to the exercise, given as patterns of answers. # noqa: E501
+
+ :param correct_responses_pattern: The correct_responses_pattern of this LearningResult.
+ :type correct_responses_pattern: str
+ """
+ if correct_responses_pattern is None:
+ raise ValueError("Invalid value for `correct_responses_pattern`, must not be `None`") # noqa: E501
+
+ self._correct_responses_pattern = correct_responses_pattern
+
+ @property
+ def created_time(self):
+ """Gets the created_time of this LearningResult.
+
+ When the learner data was received (POSIX timestamp). # noqa: E501
+
+ :return: The created_time of this LearningResult.
+ :rtype: float
+ """
+ return self._created_time
+
+ @created_time.setter
+ def created_time(self, created_time):
+ """Sets the created_time of this LearningResult.
+
+ When the learner data was received (POSIX timestamp). # noqa: E501
+
+ :param created_time: The created_time of this LearningResult.
+ :type created_time: float
+ """
+ if created_time is None:
+ raise ValueError("Invalid value for `created_time`, must not be `None`") # noqa: E501
+
+ self._created_time = created_time
+
+ @property
+ def duration(self):
+ """Gets the duration of this LearningResult.
+
+ How many seconds it took a learner to complete the exercise. # noqa: E501
+
+ :return: The duration of this LearningResult.
+ :rtype: str
+ """
+ return self._duration
+
+ @duration.setter
+ def duration(self, duration):
+ """Sets the duration of this LearningResult.
+
+ How many seconds it took a learner to complete the exercise. # noqa: E501
+
+ :param duration: The duration of this LearningResult.
+ :type duration: str
+ """
+
+ self._duration = duration
+
+ @property
+ def extensions(self):
+ """Gets the extensions of this LearningResult.
+
+        JSON string containing a mapping of keys and values (usually the local content ID, which serves as a versioning mechanism). # noqa: E501
+
+ :return: The extensions of this LearningResult.
+ :rtype: str
+ """
+ return self._extensions
+
+ @extensions.setter
+ def extensions(self, extensions):
+ """Sets the extensions of this LearningResult.
+
+        JSON string containing a mapping of keys and values (usually the local content ID, which serves as a versioning mechanism). # noqa: E501
+
+ :param extensions: The extensions of this LearningResult.
+ :type extensions: str
+ """
+
+ self._extensions = extensions
+
+ @property
+ def interaction_type(self):
+ """Gets the interaction_type of this LearningResult.
+
+ Exercise type. # noqa: E501
+
+ :return: The interaction_type of this LearningResult.
+ :rtype: str
+ """
+ return self._interaction_type
+
+ @interaction_type.setter
+ def interaction_type(self, interaction_type):
+ """Sets the interaction_type of this LearningResult.
+
+ Exercise type. # noqa: E501
+
+ :param interaction_type: The interaction_type of this LearningResult.
+ :type interaction_type: str
+ """
+
+ self._interaction_type = interaction_type
+
+ @property
+ def object_definition_description(self):
+ """Gets the object_definition_description of this LearningResult.
+
+ Exercise content, possibly including instructions. # noqa: E501
+
+ :return: The object_definition_description of this LearningResult.
+ :rtype: str
+ """
+ return self._object_definition_description
+
+ @object_definition_description.setter
+ def object_definition_description(self, object_definition_description):
+ """Sets the object_definition_description of this LearningResult.
+
+ Exercise content, possibly including instructions. # noqa: E501
+
+ :param object_definition_description: The object_definition_description of this LearningResult.
+ :type object_definition_description: str
+ """
+ if object_definition_description is None:
+ raise ValueError("Invalid value for `object_definition_description`, must not be `None`") # noqa: E501
+
+ self._object_definition_description = object_definition_description
+
+ @property
+ def object_definition_type(self):
+ """Gets the object_definition_type of this LearningResult.
+
+ Type of object definition that is presented to the user. # noqa: E501
+
+ :return: The object_definition_type of this LearningResult.
+ :rtype: str
+ """
+ return self._object_definition_type
+
+ @object_definition_type.setter
+ def object_definition_type(self, object_definition_type):
+ """Sets the object_definition_type of this LearningResult.
+
+ Type of object definition that is presented to the user. # noqa: E501
+
+ :param object_definition_type: The object_definition_type of this LearningResult.
+ :type object_definition_type: str
+ """
+
+ self._object_definition_type = object_definition_type
+
+ @property
+ def object_object_type(self):
+ """Gets the object_object_type of this LearningResult.
+
+ Type of object that is presented to the user. # noqa: E501
+
+ :return: The object_object_type of this LearningResult.
+ :rtype: str
+ """
+ return self._object_object_type
+
+ @object_object_type.setter
+ def object_object_type(self, object_object_type):
+ """Sets the object_object_type of this LearningResult.
+
+ Type of object that is presented to the user. # noqa: E501
+
+ :param object_object_type: The object_object_type of this LearningResult.
+ :type object_object_type: str
+ """
+
+ self._object_object_type = object_object_type
+
+ @property
+ def response(self):
+ """Gets the response of this LearningResult.
+
+ Answer provided by the user, possibly as a pattern. # noqa: E501
+
+ :return: The response of this LearningResult.
+ :rtype: str
+ """
+ return self._response
+
+ @response.setter
+ def response(self, response):
+ """Sets the response of this LearningResult.
+
+ Answer provided by the user, possibly as a pattern. # noqa: E501
+
+ :param response: The response of this LearningResult.
+ :type response: str
+ """
+ if response is None:
+ raise ValueError("Invalid value for `response`, must not be `None`") # noqa: E501
+
+ self._response = response
+
+ @property
+ def score_max(self):
+ """Gets the score_max of this LearningResult.
+
+ Maximum possible score to be achieved in this exercise. # noqa: E501
+
+ :return: The score_max of this LearningResult.
+ :rtype: int
+ """
+ return self._score_max
+
+ @score_max.setter
+ def score_max(self, score_max):
+ """Sets the score_max of this LearningResult.
+
+ Maximum possible score to be achieved in this exercise. # noqa: E501
+
+ :param score_max: The score_max of this LearningResult.
+ :type score_max: int
+ """
+ if score_max is None:
+ raise ValueError("Invalid value for `score_max`, must not be `None`") # noqa: E501
+
+ self._score_max = score_max
+
+ @property
+ def score_min(self):
+ """Gets the score_min of this LearningResult.
+
+        Minimum possible score to be achieved in this exercise. # noqa: E501
+
+ :return: The score_min of this LearningResult.
+ :rtype: int
+ """
+ return self._score_min
+
+ @score_min.setter
+ def score_min(self, score_min):
+ """Sets the score_min of this LearningResult.
+
+        Minimum possible score to be achieved in this exercise. # noqa: E501
+
+ :param score_min: The score_min of this LearningResult.
+ :type score_min: int
+ """
+ if score_min is None:
+ raise ValueError("Invalid value for `score_min`, must not be `None`") # noqa: E501
+
+ self._score_min = score_min
+
+ @property
+ def score_raw(self):
+ """Gets the score_raw of this LearningResult.
+
+ Score that was actually achieved by the user in this exercise. # noqa: E501
+
+ :return: The score_raw of this LearningResult.
+ :rtype: int
+ """
+ return self._score_raw
+
+ @score_raw.setter
+ def score_raw(self, score_raw):
+ """Sets the score_raw of this LearningResult.
+
+ Score that was actually achieved by the user in this exercise. # noqa: E501
+
+ :param score_raw: The score_raw of this LearningResult.
+ :type score_raw: int
+ """
+ if score_raw is None:
+ raise ValueError("Invalid value for `score_raw`, must not be `None`") # noqa: E501
+
+ self._score_raw = score_raw
+
+ @property
+ def score_scaled(self):
+ """Gets the score_scaled of this LearningResult.
+
+ Relative score (between 0 and 1) that was actually achieved by the user in this exercise. # noqa: E501
+
+ :return: The score_scaled of this LearningResult.
+ :rtype: float
+ """
+ return self._score_scaled
+
+ @score_scaled.setter
+ def score_scaled(self, score_scaled):
+ """Sets the score_scaled of this LearningResult.
+
+ Relative score (between 0 and 1) that was actually achieved by the user in this exercise. # noqa: E501
+
+ :param score_scaled: The score_scaled of this LearningResult.
+ :type score_scaled: float
+ """
+
+ self._score_scaled = score_scaled
+
+ @property
+ def success(self):
+ """Gets the success of this LearningResult.
+
+ Whether the exercise was successfully completed or not. # noqa: E501
+
+ :return: The success of this LearningResult.
+ :rtype: bool
+ """
+ return self._success
+
+ @success.setter
+ def success(self, success):
+ """Sets the success of this LearningResult.
+
+ Whether the exercise was successfully completed or not. # noqa: E501
+
+ :param success: The success of this LearningResult.
+ :type success: bool
+ """
+ if success is None:
+ raise ValueError("Invalid value for `success`, must not be `None`") # noqa: E501
+
+ self._success = success
+
+ @property
+ def verb_display(self):
+ """Gets the verb_display of this LearningResult.
+
+ Type of action that was performed by the user. # noqa: E501
+
+ :return: The verb_display of this LearningResult.
+ :rtype: str
+ """
+ return self._verb_display
+
+ @verb_display.setter
+ def verb_display(self, verb_display):
+ """Sets the verb_display of this LearningResult.
+
+ Type of action that was performed by the user. # noqa: E501
+
+ :param verb_display: The verb_display of this LearningResult.
+ :type verb_display: str
+ """
+
+ self._verb_display = verb_display
+
+ @property
+ def verb_id(self):
+ """Gets the verb_id of this LearningResult.
+
+ Link to the type of action that was performed by the user. # noqa: E501
+
+ :return: The verb_id of this LearningResult.
+ :rtype: str
+ """
+ return self._verb_id
+
+ @verb_id.setter
+ def verb_id(self, verb_id):
+ """Sets the verb_id of this LearningResult.
+
+ Link to the type of action that was performed by the user. # noqa: E501
+
+ :param verb_id: The verb_id of this LearningResult.
+ :type verb_id: str
+ """
+
+ self._verb_id = verb_id
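
For orientation, the setters above are where the spec's `required` constraints surface at runtime: `completion`, `correct_responses_pattern`, `created_time`, `object_definition_description`, `response`, `score_max`, `score_min`, `score_raw` and `success` reject `None`, while fields such as `choices` or `duration` accept any value. A minimal usage sketch with hypothetical values; it assumes the generated constructor defaults all fields to `None`, as in the other models in this patch:

```python
from openapi.openapi_server.models.learning_result import LearningResult

result = LearningResult()          # assumption: all constructor arguments default to None
result.completion = True           # required field: the setter rejects None
result.score_max = 10
result.score_raw = 7
result.response = "0[,]1"
result.duration = "PT90S"          # optional field: no None check in the setter

try:
    result.success = None          # required field set to None
except ValueError as err:
    print(err)                     # Invalid value for `success`, must not be `None`
```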
diff --git a/mc_backend/openapi/openapi_server/models/link.py b/mc_backend/openapi/openapi_server/models/link.py
new file mode 100644
index 0000000..2510829
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/link.py
@@ -0,0 +1,186 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class Link(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, annis_component_name=None, annis_component_type=None, source=None, target=None, udep_deprel=None): # noqa: E501
+ """Link - a model defined in OpenAPI
+
+ :param annis_component_name: The annis_component_name of this Link. # noqa: E501
+ :type annis_component_name: str
+ :param annis_component_type: The annis_component_type of this Link. # noqa: E501
+ :type annis_component_type: str
+ :param source: The source of this Link. # noqa: E501
+ :type source: str
+ :param target: The target of this Link. # noqa: E501
+ :type target: str
+ :param udep_deprel: The udep_deprel of this Link. # noqa: E501
+ :type udep_deprel: str
+ """
+ self.openapi_types = {
+ 'annis_component_name': str,
+ 'annis_component_type': str,
+ 'source': str,
+ 'target': str,
+ 'udep_deprel': str
+ }
+
+ self.attribute_map = {
+ 'annis_component_name': 'annis_component_name',
+ 'annis_component_type': 'annis_component_type',
+ 'source': 'source',
+ 'target': 'target',
+ 'udep_deprel': 'udep_deprel'
+ }
+
+ self._annis_component_name = annis_component_name
+ self._annis_component_type = annis_component_type
+ self._source = source
+ self._target = target
+ self._udep_deprel = udep_deprel
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'Link':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The Link of this Link. # noqa: E501
+ :rtype: Link
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def annis_component_name(self):
+ """Gets the annis_component_name of this Link.
+
+ Component name as given by ANNIS. # noqa: E501
+
+ :return: The annis_component_name of this Link.
+ :rtype: str
+ """
+ return self._annis_component_name
+
+ @annis_component_name.setter
+ def annis_component_name(self, annis_component_name):
+ """Sets the annis_component_name of this Link.
+
+ Component name as given by ANNIS. # noqa: E501
+
+ :param annis_component_name: The annis_component_name of this Link.
+ :type annis_component_name: str
+ """
+ if annis_component_name is None:
+ raise ValueError("Invalid value for `annis_component_name`, must not be `None`") # noqa: E501
+
+ self._annis_component_name = annis_component_name
+
+ @property
+ def annis_component_type(self):
+ """Gets the annis_component_type of this Link.
+
+ Component type as given by ANNIS. # noqa: E501
+
+ :return: The annis_component_type of this Link.
+ :rtype: str
+ """
+ return self._annis_component_type
+
+ @annis_component_type.setter
+ def annis_component_type(self, annis_component_type):
+ """Sets the annis_component_type of this Link.
+
+ Component type as given by ANNIS. # noqa: E501
+
+ :param annis_component_type: The annis_component_type of this Link.
+ :type annis_component_type: str
+ """
+ if annis_component_type is None:
+ raise ValueError("Invalid value for `annis_component_type`, must not be `None`") # noqa: E501
+
+ self._annis_component_type = annis_component_type
+
+ @property
+ def source(self):
+ """Gets the source of this Link.
+
+ ID of the source node for the edge. # noqa: E501
+
+ :return: The source of this Link.
+ :rtype: str
+ """
+ return self._source
+
+ @source.setter
+ def source(self, source):
+ """Sets the source of this Link.
+
+ ID of the source node for the edge. # noqa: E501
+
+ :param source: The source of this Link.
+ :type source: str
+ """
+ if source is None:
+ raise ValueError("Invalid value for `source`, must not be `None`") # noqa: E501
+
+ self._source = source
+
+ @property
+ def target(self):
+ """Gets the target of this Link.
+
+ ID of the target node for the edge. # noqa: E501
+
+ :return: The target of this Link.
+ :rtype: str
+ """
+ return self._target
+
+ @target.setter
+ def target(self, target):
+ """Sets the target of this Link.
+
+ ID of the target node for the edge. # noqa: E501
+
+ :param target: The target of this Link.
+ :type target: str
+ """
+ if target is None:
+ raise ValueError("Invalid value for `target`, must not be `None`") # noqa: E501
+
+ self._target = target
+
+ @property
+ def udep_deprel(self):
+ """Gets the udep_deprel of this Link.
+
+ Dependency relation described by the edge. # noqa: E501
+
+ :return: The udep_deprel of this Link.
+ :rtype: str
+ """
+ return self._udep_deprel
+
+ @udep_deprel.setter
+ def udep_deprel(self, udep_deprel):
+ """Sets the udep_deprel of this Link.
+
+ Dependency relation described by the edge. # noqa: E501
+
+ :param udep_deprel: The udep_deprel of this Link.
+ :type udep_deprel: str
+ """
+
+ self._udep_deprel = udep_deprel
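
Since `from_dict` delegates to `util.deserialize_model`, a `Link` can be rebuilt from the JSON edge objects shown in the `GraphData` example further down in `openapi.yaml`. A small sketch with values copied from that example; it assumes `deserialize_model` maps dict keys onto the model attributes, as the generator's helper normally does:

```python
from openapi.openapi_server.models.link import Link

edge = Link.from_dict({
    "annis_component_name": "dep",
    "annis_component_type": "Pointing",
    "source": "salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1",
    "target": "salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok3",
    "udep_deprel": "det",
})
print(edge.source, "->", edge.target, edge.udep_deprel)
```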
diff --git a/mc_backend/openapi/openapi_server/models/node.py b/mc_backend/openapi/openapi_server/models/node.py
new file mode 100644
index 0000000..16de072
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/node.py
@@ -0,0 +1,360 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class Node(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, annis_node_name=None, annis_node_type=None, annis_tok=None, annis_type=None, id=None, is_oov=None, udep_lemma=None, udep_upostag=None, udep_xpostag=None, udep_feats=None, solution=None): # noqa: E501
+ """Node - a model defined in OpenAPI
+
+ :param annis_node_name: The annis_node_name of this Node. # noqa: E501
+ :type annis_node_name: str
+ :param annis_node_type: The annis_node_type of this Node. # noqa: E501
+ :type annis_node_type: str
+ :param annis_tok: The annis_tok of this Node. # noqa: E501
+ :type annis_tok: str
+ :param annis_type: The annis_type of this Node. # noqa: E501
+ :type annis_type: str
+ :param id: The id of this Node. # noqa: E501
+ :type id: str
+ :param is_oov: The is_oov of this Node. # noqa: E501
+ :type is_oov: bool
+ :param udep_lemma: The udep_lemma of this Node. # noqa: E501
+ :type udep_lemma: str
+ :param udep_upostag: The udep_upostag of this Node. # noqa: E501
+ :type udep_upostag: str
+ :param udep_xpostag: The udep_xpostag of this Node. # noqa: E501
+ :type udep_xpostag: str
+ :param udep_feats: The udep_feats of this Node. # noqa: E501
+ :type udep_feats: str
+ :param solution: The solution of this Node. # noqa: E501
+ :type solution: str
+ """
+ self.openapi_types = {
+ 'annis_node_name': str,
+ 'annis_node_type': str,
+ 'annis_tok': str,
+ 'annis_type': str,
+ 'id': str,
+ 'is_oov': bool,
+ 'udep_lemma': str,
+ 'udep_upostag': str,
+ 'udep_xpostag': str,
+ 'udep_feats': str,
+ 'solution': str
+ }
+
+ self.attribute_map = {
+ 'annis_node_name': 'annis_node_name',
+ 'annis_node_type': 'annis_node_type',
+ 'annis_tok': 'annis_tok',
+ 'annis_type': 'annis_type',
+ 'id': 'id',
+ 'is_oov': 'is_oov',
+ 'udep_lemma': 'udep_lemma',
+ 'udep_upostag': 'udep_upostag',
+ 'udep_xpostag': 'udep_xpostag',
+ 'udep_feats': 'udep_feats',
+ 'solution': 'solution'
+ }
+
+ self._annis_node_name = annis_node_name
+ self._annis_node_type = annis_node_type
+ self._annis_tok = annis_tok
+ self._annis_type = annis_type
+ self._id = id
+ self._is_oov = is_oov
+ self._udep_lemma = udep_lemma
+ self._udep_upostag = udep_upostag
+ self._udep_xpostag = udep_xpostag
+ self._udep_feats = udep_feats
+ self._solution = solution
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'Node':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The Node of this Node. # noqa: E501
+ :rtype: Node
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def annis_node_name(self):
+ """Gets the annis_node_name of this Node.
+
+ Node name as given by ANNIS. # noqa: E501
+
+ :return: The annis_node_name of this Node.
+ :rtype: str
+ """
+ return self._annis_node_name
+
+ @annis_node_name.setter
+ def annis_node_name(self, annis_node_name):
+ """Sets the annis_node_name of this Node.
+
+ Node name as given by ANNIS. # noqa: E501
+
+ :param annis_node_name: The annis_node_name of this Node.
+ :type annis_node_name: str
+ """
+ if annis_node_name is None:
+ raise ValueError("Invalid value for `annis_node_name`, must not be `None`") # noqa: E501
+
+ self._annis_node_name = annis_node_name
+
+ @property
+ def annis_node_type(self):
+ """Gets the annis_node_type of this Node.
+
+ Node type as given by ANNIS. # noqa: E501
+
+ :return: The annis_node_type of this Node.
+ :rtype: str
+ """
+ return self._annis_node_type
+
+ @annis_node_type.setter
+ def annis_node_type(self, annis_node_type):
+ """Sets the annis_node_type of this Node.
+
+ Node type as given by ANNIS. # noqa: E501
+
+ :param annis_node_type: The annis_node_type of this Node.
+ :type annis_node_type: str
+ """
+ if annis_node_type is None:
+ raise ValueError("Invalid value for `annis_node_type`, must not be `None`") # noqa: E501
+
+ self._annis_node_type = annis_node_type
+
+ @property
+ def annis_tok(self):
+ """Gets the annis_tok of this Node.
+
+ Raw word form as given by ANNIS. # noqa: E501
+
+ :return: The annis_tok of this Node.
+ :rtype: str
+ """
+ return self._annis_tok
+
+ @annis_tok.setter
+ def annis_tok(self, annis_tok):
+ """Sets the annis_tok of this Node.
+
+ Raw word form as given by ANNIS. # noqa: E501
+
+ :param annis_tok: The annis_tok of this Node.
+ :type annis_tok: str
+ """
+ if annis_tok is None:
+ raise ValueError("Invalid value for `annis_tok`, must not be `None`") # noqa: E501
+
+ self._annis_tok = annis_tok
+
+ @property
+ def annis_type(self):
+ """Gets the annis_type of this Node.
+
+ Node type as given by ANNIS (?). # noqa: E501
+
+ :return: The annis_type of this Node.
+ :rtype: str
+ """
+ return self._annis_type
+
+ @annis_type.setter
+ def annis_type(self, annis_type):
+ """Sets the annis_type of this Node.
+
+ Node type as given by ANNIS (?). # noqa: E501
+
+ :param annis_type: The annis_type of this Node.
+ :type annis_type: str
+ """
+ if annis_type is None:
+ raise ValueError("Invalid value for `annis_type`, must not be `None`") # noqa: E501
+
+ self._annis_type = annis_type
+
+ @property
+ def id(self):
+ """Gets the id of this Node.
+
+ Unique identifier for the node in the SALT model. # noqa: E501
+
+ :return: The id of this Node.
+ :rtype: str
+ """
+ return self._id
+
+ @id.setter
+ def id(self, id):
+ """Sets the id of this Node.
+
+ Unique identifier for the node in the SALT model. # noqa: E501
+
+ :param id: The id of this Node.
+ :type id: str
+ """
+ if id is None:
+ raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501
+
+ self._id = id
+
+ @property
+ def is_oov(self):
+ """Gets the is_oov of this Node.
+
+ Whether the raw word form is missing in a given vocabulary. # noqa: E501
+
+ :return: The is_oov of this Node.
+ :rtype: bool
+ """
+ return self._is_oov
+
+ @is_oov.setter
+ def is_oov(self, is_oov):
+ """Sets the is_oov of this Node.
+
+ Whether the raw word form is missing in a given vocabulary. # noqa: E501
+
+ :param is_oov: The is_oov of this Node.
+ :type is_oov: bool
+ """
+
+ self._is_oov = is_oov
+
+ @property
+ def udep_lemma(self):
+ """Gets the udep_lemma of this Node.
+
+ Lemmatized word form. # noqa: E501
+
+ :return: The udep_lemma of this Node.
+ :rtype: str
+ """
+ return self._udep_lemma
+
+ @udep_lemma.setter
+ def udep_lemma(self, udep_lemma):
+ """Sets the udep_lemma of this Node.
+
+ Lemmatized word form. # noqa: E501
+
+ :param udep_lemma: The udep_lemma of this Node.
+ :type udep_lemma: str
+ """
+ if udep_lemma is None:
+ raise ValueError("Invalid value for `udep_lemma`, must not be `None`") # noqa: E501
+
+ self._udep_lemma = udep_lemma
+
+ @property
+ def udep_upostag(self):
+ """Gets the udep_upostag of this Node.
+
+ Universal part of speech tag for the word form. # noqa: E501
+
+ :return: The udep_upostag of this Node.
+ :rtype: str
+ """
+ return self._udep_upostag
+
+ @udep_upostag.setter
+ def udep_upostag(self, udep_upostag):
+ """Sets the udep_upostag of this Node.
+
+ Universal part of speech tag for the word form. # noqa: E501
+
+ :param udep_upostag: The udep_upostag of this Node.
+ :type udep_upostag: str
+ """
+ if udep_upostag is None:
+ raise ValueError("Invalid value for `udep_upostag`, must not be `None`") # noqa: E501
+
+ self._udep_upostag = udep_upostag
+
+ @property
+ def udep_xpostag(self):
+ """Gets the udep_xpostag of this Node.
+
+ Language-specific part of speech tag for the word form. # noqa: E501
+
+ :return: The udep_xpostag of this Node.
+ :rtype: str
+ """
+ return self._udep_xpostag
+
+ @udep_xpostag.setter
+ def udep_xpostag(self, udep_xpostag):
+ """Sets the udep_xpostag of this Node.
+
+ Language-specific part of speech tag for the word form. # noqa: E501
+
+ :param udep_xpostag: The udep_xpostag of this Node.
+ :type udep_xpostag: str
+ """
+
+ self._udep_xpostag = udep_xpostag
+
+ @property
+ def udep_feats(self):
+ """Gets the udep_feats of this Node.
+
+ Additional morphological information. # noqa: E501
+
+ :return: The udep_feats of this Node.
+ :rtype: str
+ """
+ return self._udep_feats
+
+ @udep_feats.setter
+ def udep_feats(self, udep_feats):
+ """Sets the udep_feats of this Node.
+
+ Additional morphological information. # noqa: E501
+
+ :param udep_feats: The udep_feats of this Node.
+ :type udep_feats: str
+ """
+
+ self._udep_feats = udep_feats
+
+ @property
+ def solution(self):
+ """Gets the solution of this Node.
+
+ Solution value for this node in an exercise. # noqa: E501
+
+ :return: The solution of this Node.
+ :rtype: str
+ """
+ return self._solution
+
+ @solution.setter
+ def solution(self, solution):
+ """Sets the solution of this Node.
+
+ Solution value for this node in an exercise. # noqa: E501
+
+ :param solution: The solution of this Node.
+ :type solution: str
+ """
+
+ self._solution = solution
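
`Node` combines the raw ANNIS attributes (`annis_node_name`, `annis_tok`, ...) with Universal Dependencies annotations (`udep_lemma`, `udep_upostag`, `udep_feats`); only `is_oov`, `udep_xpostag`, `udep_feats` and `solution` lack the `None` check in their setters. A sketch using the node from the `GraphData` example, under the same `deserialize_model` assumption as above:

```python
from openapi.openapi_server.models.node import Node

token = Node.from_dict({
    "annis_node_name": "urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1",
    "annis_node_type": "node",
    "annis_tok": "Galliae",
    "annis_type": "node",
    "id": "salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1",
    "udep_lemma": "Gallia",
    "udep_upostag": "PROPN",
    "udep_feats": "Case=Nom|Gender=Fem|Number=Sing",
    "is_oov": True,
})
print(token.annis_tok, token.udep_lemma, token.udep_upostag)
```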
diff --git a/mc_backend/openapi/openapi_server/models/solution.py b/mc_backend/openapi/openapi_server/models/solution.py
new file mode 100644
index 0000000..238f1cf
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/solution.py
@@ -0,0 +1,92 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server.models.solution_element import SolutionElement
+from openapi.openapi_server import util
+
+from openapi.openapi_server.models.solution_element import SolutionElement # noqa: E501
+
+class Solution(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, target=None, value=None): # noqa: E501
+ """Solution - a model defined in OpenAPI
+
+ :param target: The target of this Solution. # noqa: E501
+ :type target: SolutionElement
+ :param value: The value of this Solution. # noqa: E501
+ :type value: SolutionElement
+ """
+ self.openapi_types = {
+ 'target': SolutionElement,
+ 'value': SolutionElement
+ }
+
+ self.attribute_map = {
+ 'target': 'target',
+ 'value': 'value'
+ }
+
+ self._target = target
+ self._value = value
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'Solution':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The Solution of this Solution. # noqa: E501
+ :rtype: Solution
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def target(self):
+ """Gets the target of this Solution.
+
+
+ :return: The target of this Solution.
+ :rtype: SolutionElement
+ """
+ return self._target
+
+ @target.setter
+ def target(self, target):
+ """Sets the target of this Solution.
+
+
+ :param target: The target of this Solution.
+ :type target: SolutionElement
+ """
+
+ self._target = target
+
+ @property
+ def value(self):
+ """Gets the value of this Solution.
+
+
+ :return: The value of this Solution.
+ :rtype: SolutionElement
+ """
+ return self._value
+
+ @value.setter
+ def value(self, value):
+ """Sets the value of this Solution.
+
+
+ :param value: The value of this Solution.
+ :type value: SolutionElement
+ """
+
+ self._value = value
diff --git a/mc_backend/openapi/openapi_server/models/solution_element.py b/mc_backend/openapi/openapi_server/models/solution_element.py
new file mode 100644
index 0000000..1e68e2f
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/solution_element.py
@@ -0,0 +1,156 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class SolutionElement(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, content=None, salt_id=None, sentence_id=None, token_id=None): # noqa: E501
+ """SolutionElement - a model defined in OpenAPI
+
+ :param content: The content of this SolutionElement. # noqa: E501
+ :type content: str
+ :param salt_id: The salt_id of this SolutionElement. # noqa: E501
+ :type salt_id: str
+ :param sentence_id: The sentence_id of this SolutionElement. # noqa: E501
+ :type sentence_id: int
+ :param token_id: The token_id of this SolutionElement. # noqa: E501
+ :type token_id: int
+ """
+ self.openapi_types = {
+ 'content': str,
+ 'salt_id': str,
+ 'sentence_id': int,
+ 'token_id': int
+ }
+
+ self.attribute_map = {
+ 'content': 'content',
+ 'salt_id': 'salt_id',
+ 'sentence_id': 'sentence_id',
+ 'token_id': 'token_id'
+ }
+
+ self._content = content
+ self._salt_id = salt_id
+ self._sentence_id = sentence_id
+ self._token_id = token_id
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'SolutionElement':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The SolutionElement of this SolutionElement. # noqa: E501
+ :rtype: SolutionElement
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def content(self):
+ """Gets the content of this SolutionElement.
+
+ Content of the solution element. # noqa: E501
+
+ :return: The content of this SolutionElement.
+ :rtype: str
+ """
+ return self._content
+
+ @content.setter
+ def content(self, content):
+ """Sets the content of this SolutionElement.
+
+ Content of the solution element. # noqa: E501
+
+ :param content: The content of this SolutionElement.
+ :type content: str
+ """
+ if content is None:
+ raise ValueError("Invalid value for `content`, must not be `None`") # noqa: E501
+
+ self._content = content
+
+ @property
+ def salt_id(self):
+ """Gets the salt_id of this SolutionElement.
+
+ Unique identifier for the node in the SALT model. # noqa: E501
+
+ :return: The salt_id of this SolutionElement.
+ :rtype: str
+ """
+ return self._salt_id
+
+ @salt_id.setter
+ def salt_id(self, salt_id):
+ """Sets the salt_id of this SolutionElement.
+
+ Unique identifier for the node in the SALT model. # noqa: E501
+
+ :param salt_id: The salt_id of this SolutionElement.
+ :type salt_id: str
+ """
+
+ self._salt_id = salt_id
+
+ @property
+ def sentence_id(self):
+ """Gets the sentence_id of this SolutionElement.
+
+ Unique identifier for the sentence in a corpus. # noqa: E501
+
+ :return: The sentence_id of this SolutionElement.
+ :rtype: int
+ """
+ return self._sentence_id
+
+ @sentence_id.setter
+ def sentence_id(self, sentence_id):
+ """Sets the sentence_id of this SolutionElement.
+
+ Unique identifier for the sentence in a corpus. # noqa: E501
+
+ :param sentence_id: The sentence_id of this SolutionElement.
+ :type sentence_id: int
+ """
+ if sentence_id is None:
+ raise ValueError("Invalid value for `sentence_id`, must not be `None`") # noqa: E501
+
+ self._sentence_id = sentence_id
+
+ @property
+ def token_id(self):
+ """Gets the token_id of this SolutionElement.
+
+ Unique identifier for the token in a sentence. # noqa: E501
+
+ :return: The token_id of this SolutionElement.
+ :rtype: int
+ """
+ return self._token_id
+
+ @token_id.setter
+ def token_id(self, token_id):
+ """Sets the token_id of this SolutionElement.
+
+ Unique identifier for the token in a sentence. # noqa: E501
+
+ :param token_id: The token_id of this SolutionElement.
+ :type token_id: int
+ """
+ if token_id is None:
+ raise ValueError("Invalid value for `token_id`, must not be `None`") # noqa: E501
+
+ self._token_id = token_id
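
`Solution` simply pairs two `SolutionElement` instances, and `SolutionElement` treats `content`, `sentence_id` and `token_id` as required (only `salt_id` may stay unset). A minimal sketch mirroring one entry of the `solutions` array in the `AnnisResponse` example:

```python
from openapi.openapi_server.models.solution import Solution
from openapi.openapi_server.models.solution_element import SolutionElement

element = SolutionElement(
    content="unam",
    salt_id="salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok9",
    sentence_id=52548,
    token_id=9,
)
# In the spec example, target and value reference the same token.
solution = Solution(target=element, value=element)
print(solution.target.content, solution.value.token_id)
```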
diff --git a/mc_backend/openapi/openapi_server/models/text_complexity.py b/mc_backend/openapi/openapi_server/models/text_complexity.py
new file mode 100644
index 0000000..37dee5d
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/text_complexity.py
@@ -0,0 +1,462 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class TextComplexity(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, all=None, avg_w_len=None, avg_w_per_sent=None, lex_den=None, n_abl_abs=None, n_clause=None, n_gerund=None, n_inf=None, n_part=None, n_punct=None, n_sent=None, n_subclause=None, n_types=None, n_w=None, pos=None): # noqa: E501
+ """TextComplexity - a model defined in OpenAPI
+
+ :param all: The all of this TextComplexity. # noqa: E501
+ :type all: float
+ :param avg_w_len: The avg_w_len of this TextComplexity. # noqa: E501
+ :type avg_w_len: float
+ :param avg_w_per_sent: The avg_w_per_sent of this TextComplexity. # noqa: E501
+ :type avg_w_per_sent: float
+ :param lex_den: The lex_den of this TextComplexity. # noqa: E501
+ :type lex_den: float
+ :param n_abl_abs: The n_abl_abs of this TextComplexity. # noqa: E501
+ :type n_abl_abs: int
+ :param n_clause: The n_clause of this TextComplexity. # noqa: E501
+ :type n_clause: int
+ :param n_gerund: The n_gerund of this TextComplexity. # noqa: E501
+ :type n_gerund: int
+ :param n_inf: The n_inf of this TextComplexity. # noqa: E501
+ :type n_inf: int
+ :param n_part: The n_part of this TextComplexity. # noqa: E501
+ :type n_part: int
+ :param n_punct: The n_punct of this TextComplexity. # noqa: E501
+ :type n_punct: int
+ :param n_sent: The n_sent of this TextComplexity. # noqa: E501
+ :type n_sent: int
+ :param n_subclause: The n_subclause of this TextComplexity. # noqa: E501
+ :type n_subclause: int
+ :param n_types: The n_types of this TextComplexity. # noqa: E501
+ :type n_types: int
+ :param n_w: The n_w of this TextComplexity. # noqa: E501
+ :type n_w: int
+ :param pos: The pos of this TextComplexity. # noqa: E501
+ :type pos: int
+ """
+ self.openapi_types = {
+ 'all': float,
+ 'avg_w_len': float,
+ 'avg_w_per_sent': float,
+ 'lex_den': float,
+ 'n_abl_abs': int,
+ 'n_clause': int,
+ 'n_gerund': int,
+ 'n_inf': int,
+ 'n_part': int,
+ 'n_punct': int,
+ 'n_sent': int,
+ 'n_subclause': int,
+ 'n_types': int,
+ 'n_w': int,
+ 'pos': int
+ }
+
+ self.attribute_map = {
+ 'all': 'all',
+ 'avg_w_len': 'avg_w_len',
+ 'avg_w_per_sent': 'avg_w_per_sent',
+ 'lex_den': 'lex_den',
+ 'n_abl_abs': 'n_abl_abs',
+ 'n_clause': 'n_clause',
+ 'n_gerund': 'n_gerund',
+ 'n_inf': 'n_inf',
+ 'n_part': 'n_part',
+ 'n_punct': 'n_punct',
+ 'n_sent': 'n_sent',
+ 'n_subclause': 'n_subclause',
+ 'n_types': 'n_types',
+ 'n_w': 'n_w',
+ 'pos': 'pos'
+ }
+
+ self._all = all
+ self._avg_w_len = avg_w_len
+ self._avg_w_per_sent = avg_w_per_sent
+ self._lex_den = lex_den
+ self._n_abl_abs = n_abl_abs
+ self._n_clause = n_clause
+ self._n_gerund = n_gerund
+ self._n_inf = n_inf
+ self._n_part = n_part
+ self._n_punct = n_punct
+ self._n_sent = n_sent
+ self._n_subclause = n_subclause
+ self._n_types = n_types
+ self._n_w = n_w
+ self._pos = pos
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'TextComplexity':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The TextComplexity of this TextComplexity. # noqa: E501
+ :rtype: TextComplexity
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def all(self):
+ """Gets the all of this TextComplexity.
+
+ Overall text complexity of the given corpus. # noqa: E501
+
+ :return: The all of this TextComplexity.
+ :rtype: float
+ """
+ return self._all
+
+ @all.setter
+ def all(self, all):
+ """Sets the all of this TextComplexity.
+
+ Overall text complexity of the given corpus. # noqa: E501
+
+ :param all: The all of this TextComplexity.
+ :type all: float
+ """
+
+ self._all = all
+
+ @property
+ def avg_w_len(self):
+ """Gets the avg_w_len of this TextComplexity.
+
+ Average length of a word in the given corpus. # noqa: E501
+
+ :return: The avg_w_len of this TextComplexity.
+ :rtype: float
+ """
+ return self._avg_w_len
+
+ @avg_w_len.setter
+ def avg_w_len(self, avg_w_len):
+ """Sets the avg_w_len of this TextComplexity.
+
+ Average length of a word in the given corpus. # noqa: E501
+
+ :param avg_w_len: The avg_w_len of this TextComplexity.
+ :type avg_w_len: float
+ """
+
+ self._avg_w_len = avg_w_len
+
+ @property
+ def avg_w_per_sent(self):
+ """Gets the avg_w_per_sent of this TextComplexity.
+
+ Average number of words per sentence. # noqa: E501
+
+ :return: The avg_w_per_sent of this TextComplexity.
+ :rtype: float
+ """
+ return self._avg_w_per_sent
+
+ @avg_w_per_sent.setter
+ def avg_w_per_sent(self, avg_w_per_sent):
+ """Sets the avg_w_per_sent of this TextComplexity.
+
+ Average number of words per sentence. # noqa: E501
+
+ :param avg_w_per_sent: The avg_w_per_sent of this TextComplexity.
+ :type avg_w_per_sent: float
+ """
+
+ self._avg_w_per_sent = avg_w_per_sent
+
+ @property
+ def lex_den(self):
+ """Gets the lex_den of this TextComplexity.
+
+ Lexical density of the given corpus. # noqa: E501
+
+ :return: The lex_den of this TextComplexity.
+ :rtype: float
+ """
+ return self._lex_den
+
+ @lex_den.setter
+ def lex_den(self, lex_den):
+ """Sets the lex_den of this TextComplexity.
+
+ Lexical density of the given corpus. # noqa: E501
+
+ :param lex_den: The lex_den of this TextComplexity.
+ :type lex_den: float
+ """
+ if lex_den is not None and lex_den > 1: # noqa: E501
+ raise ValueError("Invalid value for `lex_den`, must be a value less than or equal to `1`") # noqa: E501
+ if lex_den is not None and lex_den < 0: # noqa: E501
+ raise ValueError("Invalid value for `lex_den`, must be a value greater than or equal to `0`") # noqa: E501
+
+ self._lex_den = lex_den
+
+ @property
+ def n_abl_abs(self):
+ """Gets the n_abl_abs of this TextComplexity.
+
+ Number of ablativi absoluti in the given corpus. # noqa: E501
+
+ :return: The n_abl_abs of this TextComplexity.
+ :rtype: int
+ """
+ return self._n_abl_abs
+
+ @n_abl_abs.setter
+ def n_abl_abs(self, n_abl_abs):
+ """Sets the n_abl_abs of this TextComplexity.
+
+ Number of ablativi absoluti in the given corpus. # noqa: E501
+
+ :param n_abl_abs: The n_abl_abs of this TextComplexity.
+ :type n_abl_abs: int
+ """
+
+ self._n_abl_abs = n_abl_abs
+
+ @property
+ def n_clause(self):
+ """Gets the n_clause of this TextComplexity.
+
+ Number of clauses in the given corpus. # noqa: E501
+
+ :return: The n_clause of this TextComplexity.
+ :rtype: int
+ """
+ return self._n_clause
+
+ @n_clause.setter
+ def n_clause(self, n_clause):
+ """Sets the n_clause of this TextComplexity.
+
+ Number of clauses in the given corpus. # noqa: E501
+
+ :param n_clause: The n_clause of this TextComplexity.
+ :type n_clause: int
+ """
+
+ self._n_clause = n_clause
+
+ @property
+ def n_gerund(self):
+ """Gets the n_gerund of this TextComplexity.
+
+ Number of gerunds in the given corpus. # noqa: E501
+
+ :return: The n_gerund of this TextComplexity.
+ :rtype: int
+ """
+ return self._n_gerund
+
+ @n_gerund.setter
+ def n_gerund(self, n_gerund):
+ """Sets the n_gerund of this TextComplexity.
+
+ Number of gerunds in the given corpus. # noqa: E501
+
+ :param n_gerund: The n_gerund of this TextComplexity.
+ :type n_gerund: int
+ """
+
+ self._n_gerund = n_gerund
+
+ @property
+ def n_inf(self):
+ """Gets the n_inf of this TextComplexity.
+
+ Number of infinitives in the given corpus. # noqa: E501
+
+ :return: The n_inf of this TextComplexity.
+ :rtype: int
+ """
+ return self._n_inf
+
+ @n_inf.setter
+ def n_inf(self, n_inf):
+ """Sets the n_inf of this TextComplexity.
+
+ Number of infinitives in the given corpus. # noqa: E501
+
+ :param n_inf: The n_inf of this TextComplexity.
+ :type n_inf: int
+ """
+
+ self._n_inf = n_inf
+
+ @property
+ def n_part(self):
+ """Gets the n_part of this TextComplexity.
+
+ Number of participles in the given corpus. # noqa: E501
+
+ :return: The n_part of this TextComplexity.
+ :rtype: int
+ """
+ return self._n_part
+
+ @n_part.setter
+ def n_part(self, n_part):
+ """Sets the n_part of this TextComplexity.
+
+ Number of participles in the given corpus. # noqa: E501
+
+ :param n_part: The n_part of this TextComplexity.
+ :type n_part: int
+ """
+
+ self._n_part = n_part
+
+ @property
+ def n_punct(self):
+ """Gets the n_punct of this TextComplexity.
+
+        Number of punctuation marks in the given corpus. # noqa: E501
+
+ :return: The n_punct of this TextComplexity.
+ :rtype: int
+ """
+ return self._n_punct
+
+ @n_punct.setter
+ def n_punct(self, n_punct):
+ """Sets the n_punct of this TextComplexity.
+
+        Number of punctuation marks in the given corpus. # noqa: E501
+
+ :param n_punct: The n_punct of this TextComplexity.
+ :type n_punct: int
+ """
+
+ self._n_punct = n_punct
+
+ @property
+ def n_sent(self):
+ """Gets the n_sent of this TextComplexity.
+
+ Number of sentences in the given corpus. # noqa: E501
+
+ :return: The n_sent of this TextComplexity.
+ :rtype: int
+ """
+ return self._n_sent
+
+ @n_sent.setter
+ def n_sent(self, n_sent):
+ """Sets the n_sent of this TextComplexity.
+
+ Number of sentences in the given corpus. # noqa: E501
+
+ :param n_sent: The n_sent of this TextComplexity.
+ :type n_sent: int
+ """
+
+ self._n_sent = n_sent
+
+ @property
+ def n_subclause(self):
+ """Gets the n_subclause of this TextComplexity.
+
+ Number of subclauses in the given corpus. # noqa: E501
+
+ :return: The n_subclause of this TextComplexity.
+ :rtype: int
+ """
+ return self._n_subclause
+
+ @n_subclause.setter
+ def n_subclause(self, n_subclause):
+ """Sets the n_subclause of this TextComplexity.
+
+ Number of subclauses in the given corpus. # noqa: E501
+
+ :param n_subclause: The n_subclause of this TextComplexity.
+ :type n_subclause: int
+ """
+
+ self._n_subclause = n_subclause
+
+ @property
+ def n_types(self):
+ """Gets the n_types of this TextComplexity.
+
+ Number of distinct word forms in the given corpus. # noqa: E501
+
+ :return: The n_types of this TextComplexity.
+ :rtype: int
+ """
+ return self._n_types
+
+ @n_types.setter
+ def n_types(self, n_types):
+ """Sets the n_types of this TextComplexity.
+
+ Number of distinct word forms in the given corpus. # noqa: E501
+
+ :param n_types: The n_types of this TextComplexity.
+ :type n_types: int
+ """
+
+ self._n_types = n_types
+
+ @property
+ def n_w(self):
+ """Gets the n_w of this TextComplexity.
+
+ Number of words in the given corpus. # noqa: E501
+
+ :return: The n_w of this TextComplexity.
+ :rtype: int
+ """
+ return self._n_w
+
+ @n_w.setter
+ def n_w(self, n_w):
+ """Sets the n_w of this TextComplexity.
+
+ Number of words in the given corpus. # noqa: E501
+
+ :param n_w: The n_w of this TextComplexity.
+ :type n_w: int
+ """
+
+ self._n_w = n_w
+
+ @property
+ def pos(self):
+ """Gets the pos of this TextComplexity.
+
+ Number of distinct part of speech tags in the given corpus. # noqa: E501
+
+ :return: The pos of this TextComplexity.
+ :rtype: int
+ """
+ return self._pos
+
+ @pos.setter
+ def pos(self, pos):
+ """Sets the pos of this TextComplexity.
+
+ Number of distinct part of speech tags in the given corpus. # noqa: E501
+
+ :param pos: The pos of this TextComplexity.
+ :type pos: int
+ """
+
+ self._pos = pos
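
Most `TextComplexity` measures are plain counts and averages without validation; `lex_den` is the only one the generator guards with a range check (0 to 1). A short sketch reusing the values from the `text_complexity` example in the spec:

```python
from openapi.openapi_server.models.text_complexity import TextComplexity

tc = TextComplexity(all=42.31, avg_w_len=5.4, avg_w_per_sent=5.4, n_sent=1, n_w=1)
tc.lex_den = 0.43        # within [0, 1]: accepted

try:
    tc.lex_den = 1.5     # outside the allowed range
except ValueError as err:
    print(err)           # Invalid value for `lex_den`, must be a value less than or equal to `1`
```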
diff --git a/mc_backend/openapi/openapi_server/models/update_info.py b/mc_backend/openapi/openapi_server/models/update_info.py
new file mode 100644
index 0000000..bb9c6a0
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/models/update_info.py
@@ -0,0 +1,132 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from openapi.openapi_server.models.base_model_ import Model
+from openapi.openapi_server import util
+
+
+class UpdateInfo(Model):
+ """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, created_time=None, last_modified_time=None, resource_type=None): # noqa: E501
+ """UpdateInfo - a model defined in OpenAPI
+
+ :param created_time: The created_time of this UpdateInfo. # noqa: E501
+ :type created_time: float
+ :param last_modified_time: The last_modified_time of this UpdateInfo. # noqa: E501
+ :type last_modified_time: float
+ :param resource_type: The resource_type of this UpdateInfo. # noqa: E501
+ :type resource_type: str
+ """
+ self.openapi_types = {
+ 'created_time': float,
+ 'last_modified_time': float,
+ 'resource_type': str
+ }
+
+ self.attribute_map = {
+ 'created_time': 'created_time',
+ 'last_modified_time': 'last_modified_time',
+ 'resource_type': 'resource_type'
+ }
+
+ self._created_time = created_time
+ self._last_modified_time = last_modified_time
+ self._resource_type = resource_type
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'UpdateInfo':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The UpdateInfo of this UpdateInfo. # noqa: E501
+ :rtype: UpdateInfo
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def created_time(self):
+ """Gets the created_time of this UpdateInfo.
+
+ When the resource was created (as POSIX timestamp). # noqa: E501
+
+ :return: The created_time of this UpdateInfo.
+ :rtype: float
+ """
+ return self._created_time
+
+ @created_time.setter
+ def created_time(self, created_time):
+ """Sets the created_time of this UpdateInfo.
+
+ When the resource was created (as POSIX timestamp). # noqa: E501
+
+ :param created_time: The created_time of this UpdateInfo.
+ :type created_time: float
+ """
+ if created_time is None:
+ raise ValueError("Invalid value for `created_time`, must not be `None`") # noqa: E501
+
+ self._created_time = created_time
+
+ @property
+ def last_modified_time(self):
+ """Gets the last_modified_time of this UpdateInfo.
+
+ When the resource was last modified (as POSIX timestamp). # noqa: E501
+
+ :return: The last_modified_time of this UpdateInfo.
+ :rtype: float
+ """
+ return self._last_modified_time
+
+ @last_modified_time.setter
+ def last_modified_time(self, last_modified_time):
+ """Sets the last_modified_time of this UpdateInfo.
+
+ When the resource was last modified (as POSIX timestamp). # noqa: E501
+
+ :param last_modified_time: The last_modified_time of this UpdateInfo.
+ :type last_modified_time: float
+ """
+ if last_modified_time is None:
+ raise ValueError("Invalid value for `last_modified_time`, must not be `None`") # noqa: E501
+
+ self._last_modified_time = last_modified_time
+
+ @property
+ def resource_type(self):
+ """Gets the resource_type of this UpdateInfo.
+
+ Name of the resource for which update timestamps are indexed. # noqa: E501
+
+ :return: The resource_type of this UpdateInfo.
+ :rtype: str
+ """
+ return self._resource_type
+
+ @resource_type.setter
+ def resource_type(self, resource_type):
+ """Sets the resource_type of this UpdateInfo.
+
+ Name of the resource for which update timestamps are indexed. # noqa: E501
+
+ :param resource_type: The resource_type of this UpdateInfo.
+ :type resource_type: str
+ """
+ allowed_values = ["cts_data", "exercise_list", "file_api_clean"] # noqa: E501
+ if resource_type not in allowed_values:
+ raise ValueError(
+ "Invalid value for `resource_type` ({0}), must be one of {1}"
+ .format(resource_type, allowed_values)
+ )
+
+ self._resource_type = resource_type
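
`UpdateInfo` is the only model in this group with an enum check: `resource_type` must be one of `cts_data`, `exercise_list` or `file_api_clean`, and both timestamps are required. A minimal sketch:

```python
import time

from openapi.openapi_server.models.update_info import UpdateInfo

info = UpdateInfo()
info.created_time = time.time()
info.last_modified_time = time.time()
info.resource_type = "exercise_list"   # allowed enum value

try:
    info.resource_type = "corpora"     # not in allowed_values
except ValueError as err:
    print(err)
```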
diff --git a/mc_backend/openapi/openapi_server/openapi/openapi.yaml b/mc_backend/openapi/openapi_server/openapi/openapi.yaml
new file mode 100644
index 0000000..656e96b
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/openapi/openapi.yaml
@@ -0,0 +1,949 @@
+openapi: 3.0.0
+info:
+ title: Machina Callida Backend REST API
+ version: "1.0"
+servers:
+- url: http://localhost:5000/mc/api/v1.0
+paths:
+ /corpora:
+ get:
+ operationId: mcserver_app_api_corpus_list_api_get
+ parameters:
+ - description: Time (in milliseconds) of the last update.
+ explode: true
+ in: query
+ name: last_update_time
+ required: true
+ schema:
+ example: 123456789
+ type: integer
+ style: form
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Corpus'
+ description: Corpus list
+ summary: Returns a list of corpora.
+ x-openapi-router-controller: openapi_server.controllers.default_controller
+ /corpora/{cid}:
+ delete:
+ operationId: mcserver_app_api_corpus_api_delete
+ parameters:
+ - description: Corpus identifier.
+ explode: false
+ in: path
+ name: cid
+ required: true
+ schema:
+ example: 1
+ type: integer
+ style: simple
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ example: true
+ type: boolean
+ description: Indication of success
+ summary: Deletes a single corpus by ID.
+ x-openapi-router-controller: openapi_server.controllers.default_controller
+ get:
+ operationId: mcserver_app_api_corpus_api_get
+ parameters:
+ - description: Corpus identifier.
+ explode: false
+ in: path
+ name: cid
+ required: true
+ schema:
+ example: 1
+ type: integer
+ style: simple
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Corpus'
+ description: Corpus object
+ summary: Returns a single corpus by ID.
+ x-openapi-router-controller: openapi_server.controllers.default_controller
+ patch:
+ operationId: mcserver_app_api_corpus_api_patch
+ parameters:
+ - description: Corpus identifier.
+ explode: false
+ in: path
+ name: cid
+ required: true
+ schema:
+ example: 1
+ type: integer
+ style: simple
+ - description: Author of the texts in the corpus.
+ explode: true
+ in: query
+ name: author
+ required: false
+ schema:
+ example: Aulus Gellius
+ type: string
+ style: form
+ - description: CTS base URN for referencing the corpus.
+ explode: true
+ in: query
+ name: source_urn
+ required: false
+ schema:
+ example: urn:cts:latinLit:phi1254.phi001.perseus-lat2
+ type: string
+ style: form
+ - description: Corpus title.
+ explode: true
+ in: query
+ name: title
+ required: false
+ schema:
+ example: Noctes Atticae
+ type: string
+ style: form
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Corpus'
+ description: Corpus object
+ summary: Updates a single corpus by ID.
+ x-openapi-router-controller: openapi_server.controllers.default_controller
+ /exercise:
+ get:
+ operationId: mcserver_app_api_exercise_api_get
+ parameters:
+ - description: Unique identifier (UUID) for the exercise.
+ explode: true
+ in: query
+ name: eid
+ required: true
+ schema:
+ example: 12345678-1234-5678-1234-567812345678
+ type: string
+ style: form
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/AnnisResponse'
+ description: Exercise data object, including a graph model for linguistic
+ annotations.
+ summary: Returns exercise data by ID.
+ x-openapi-router-controller: openapi_server.controllers.default_controller
+ post:
+ operationId: mcserver_app_api_exercise_api_post
+ requestBody:
+ $ref: '#/components/requestBodies/ExerciseForm'
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/AnnisResponse'
+ description: Exercise data object
+ summary: Creates a new exercise.
+ x-openapi-router-controller: openapi_server.controllers.default_controller
+components:
+ requestBodies:
+ ExerciseForm:
+ content:
+ application/x-www-form-urlencoded:
+ schema:
+ allOf:
+ - $ref: '#/components/schemas/ExerciseBase'
+ - description: Additional exercise data.
+ properties:
+ type:
+ description: Type of exercise, concerning interaction and layout.
+ example: markWords
+ type: string
+ type_translation:
+ description: Localized expression of the exercise type.
+ example: Cloze
+ type: string
+ urn:
+ description: CTS URN for the text passage from which the exercise
+ was created.
+ example: urn:cts:latinLit:phi0448.phi001.perseus-lat2:1.1.1
+ type: string
+ required:
+ - type
+ type: object
+ type: object
+ x-body-name: exercise_data
+ required: true
+ schemas:
+ AnnisResponse:
+ description: A response with graph data from ANNIS, possibly with additional
+ data for exercises.
+ example:
+ frequency_analysis:
+ - values: []
+ count: 1
+ phenomena: []
+ - values: []
+ count: 1
+ phenomena: []
+ text_complexity:
+ all: 42.31
+ n_abl_abs: 1
+ n_clause: 1
+ n_part: 1
+ n_subclause: 1
+ n_punct: 1
+ n_w: 1
+ n_inf: 1
+ n_sent: 1
+ n_types: 1
+ avg_w_per_sent: 5.4
+ lex_den: 0.43
+ avg_w_len: 5.4
+ n_gerund: 1
+ pos: 1
+ exercise_id: 12345678-1234-5678-1234-567812345678
+ solutions:
+ - value:
+ token_id: 9
+ sentence_id: 52548
+ content: unam
+ salt_id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok9
+ target:
+ token_id: 9
+ sentence_id: 52548
+ content: unam
+ salt_id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok9
+ - value:
+ token_id: 9
+ sentence_id: 52548
+ content: unam
+ salt_id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok9
+ target:
+ token_id: 9
+ sentence_id: 52548
+ content: unam
+ salt_id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok9
+ graph_data:
+ directed: true
+ nodes:
+ - annis_node_name: urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_xpostag: Ne
+ solution: solution
+ annis_tok: Galliae
+ annis_type: node
+ annis_node_type: node
+ udep_lemma: Gallia
+ udep_feats: Case=Nom|Gender=Fem|Number=Sing
+ id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_upostag: PROPN
+ is_oov: true
+ - annis_node_name: urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_xpostag: Ne
+ solution: solution
+ annis_tok: Galliae
+ annis_type: node
+ annis_node_type: node
+ udep_lemma: Gallia
+ udep_feats: Case=Nom|Gender=Fem|Number=Sing
+ id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_upostag: PROPN
+ is_oov: true
+ links:
+ - annis_component_name: dep
+ annis_component_type: Pointing
+ source: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_deprel: det
+ target: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok3
+ - annis_component_name: dep
+ annis_component_type: Pointing
+ source: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_deprel: det
+ target: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok3
+ graph: {}
+ multigraph: true
+ uri: /mc/api/v1.0/file/fd97630c-1f5a-4102-af56-20eb0babdfee
+ exercise_type: ddwtos
+ properties:
+ exercise_id:
+ description: Unique identifier (UUID) for the exercise.
+ example: 12345678-1234-5678-1234-567812345678
+ type: string
+ exercise_type:
+ description: Type of exercise, concerning interaction and layout.
+ example: ddwtos
+ type: string
+ frequency_analysis:
+ description: List of items with frequency data for linguistic phenomena.
+ items:
+ $ref: '#/components/schemas/AnnisResponse_frequency_analysis'
+ type: array
+ graph_data:
+ $ref: '#/components/schemas/GraphData'
+ solutions:
+ description: Correct solutions for this exercise.
+ items:
+ $ref: '#/components/schemas/Solution'
+ type: array
+ text_complexity:
+ $ref: '#/components/schemas/TextComplexity'
+ uri:
+ description: URI for accessing the exercise in this API.
+ example: /mc/api/v1.0/file/fd97630c-1f5a-4102-af56-20eb0babdfee
+ type: string
+ type: object
+ Corpus:
+ description: Collection of texts.
+ example:
+ citation_level_3: Section
+ author: Aulus Gellius
+ source_urn: urn:cts:latinLit:phi1254.phi001.perseus-lat2
+ title: Noctes Atticae
+ citation_level_1: Book
+ cid: 1
+ citation_level_2: Chapter
+ properties:
+ author:
+ default: Anonymus
+ description: Author of the texts in the corpus.
+ example: Aulus Gellius
+ nullable: false
+ type: string
+ cid:
+ description: Unique identifier for the corpus.
+ example: 1
+ type: integer
+ x-primary-key: true
+ x-autoincrement: true
+ citation_level_1:
+ default: default
+ description: First level for citing the corpus.
+ example: Book
+ type: string
+ citation_level_2:
+ default: default
+ description: Second level for citing the corpus.
+ example: Chapter
+ type: string
+ citation_level_3:
+ default: default
+ description: Third level for citing the corpus.
+ example: Section
+ type: string
+ source_urn:
+ description: CTS base URN for referencing the corpus.
+ example: urn:cts:latinLit:phi1254.phi001.perseus-lat2
+ type: string
+ x-unique: true
+ title:
+ default: Anonymus
+ description: Corpus title.
+ example: Noctes Atticae
+ nullable: false
+ type: string
+ required:
+ - source_urn
+ type: object
+ x-tablename: Corpus
+ Exercise:
+ allOf:
+ - $ref: '#/components/schemas/ExerciseBase'
+ - $ref: '#/components/schemas/Exercise_allOf'
+ ExerciseBase:
+ description: Base data for creating and evaluating interactive exercises.
+ properties:
+ correct_feedback:
+ default: ""
+ description: Feedback for successful completion of the exercise.
+ example: Well done!
+ type: string
+ general_feedback:
+ default: ""
+ description: Feedback for finishing the exercise.
+ example: You have finished the exercise.
+ type: string
+ incorrect_feedback:
+ default: ""
+ description: Feedback for failing to complete the exercise successfully.
+ example: Unfortunately, that answer is wrong.
+ type: string
+ instructions:
+ default: ""
+ description: Hints for how to complete the exercise.
+ example: Fill in the gaps!
+ type: string
+ partially_correct_feedback:
+ default: ""
+ description: Feedback for successfully completing certain parts of the exercise.
+ example: Some parts of this answer are correct.
+ type: string
+ search_values:
+ default: '[]'
+ description: Search queries that were used to build the exercise.
+ example: '[''upostag=noun'', ''dependency=object'']'
+ type: string
+ work_author:
+ default: ""
+ description: Name of the person who wrote the base text for the exercise.
+ example: C. Iulius Caesar
+ type: string
+ work_title:
+ default: ""
+ description: Title of the base text for the exercise.
+ example: Noctes Atticae
+ type: string
+ type: object
+ GraphData:
+ description: Nodes, edges and metadata for a graph.
+ example:
+ directed: true
+ nodes:
+ - annis_node_name: urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_xpostag: Ne
+ solution: solution
+ annis_tok: Galliae
+ annis_type: node
+ annis_node_type: node
+ udep_lemma: Gallia
+ udep_feats: Case=Nom|Gender=Fem|Number=Sing
+ id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_upostag: PROPN
+ is_oov: true
+ - annis_node_name: urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_xpostag: Ne
+ solution: solution
+ annis_tok: Galliae
+ annis_type: node
+ annis_node_type: node
+ udep_lemma: Gallia
+ udep_feats: Case=Nom|Gender=Fem|Number=Sing
+ id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_upostag: PROPN
+ is_oov: true
+ links:
+ - annis_component_name: dep
+ annis_component_type: Pointing
+ source: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_deprel: det
+ target: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok3
+ - annis_component_name: dep
+ annis_component_type: Pointing
+ source: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_deprel: det
+ target: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok3
+ graph: {}
+ multigraph: true
+ properties:
+ directed:
+ description: Whether edges in the returned graph are directed.
+ example: true
+ type: boolean
+ graph:
+ description: Additional graph data.
+ example: {}
+ type: object
+ links:
+ description: List of edges for the graph.
+ items:
+ $ref: '#/components/schemas/Link'
+ type: array
+ multigraph:
+ description: Whether the graph may contain multiple (parallel) edges between the same pair of nodes.
+ example: true
+ type: boolean
+ nodes:
+ description: List of nodes for the graph.
+ items:
+ $ref: '#/components/schemas/Node'
+ type: array
+ required:
+ - links
+ - nodes
+ type: object
+ LearningResult:
+ description: Learner data for completed exercises.
+ properties:
+ actor_account_name:
+ default: ""
+ description: H5P user ID, usually unique per device.
+ example: ebea3f3e-7410-4215-b34d-c1417f7c7c18
+ type: string
+ actor_object_type:
+ default: ""
+ description: Describes the kind of object that was recognized as actor.
+ example: Agent
+ type: string
+ category_id:
+ default: ""
+ description: Link to the exercise type specification.
+ example: http://h5p.org/libraries/H5P.MarkTheWords-1.9
+ type: string
+ category_object_type:
+ default: ""
+ description: Describes the kind of object that was recognized as exercise.
+ example: Activity
+ type: string
+ choices:
+ default: '[]'
+ description: JSON string containing a list of possible choices, each with
+ ID and description.
+ example: |-
+ [{'id':'2','description':{'en-US':'Quintus ist bei allen in der Provinz beliebt.
+ '}},{'id':'3','description':{'en-US':'Asia ist eine unbekannte Provinz.
+ '}}]
+ type: string
+ completion:
+ description: Whether the exercise was fully processed or not.
+ example: true
+ type: boolean
+ correct_responses_pattern:
+ description: JSON string containing a list of possible solutions to the
+ exercise, given as patterns of answers.
+ example: '[''0[,]1[,]2'']'
+ type: string
+ created_time:
+ description: When the learner data was received (POSIX timestamp).
+ example: 1234567.789
+ format: float
+ type: number
+ x-index: true
+ x-primary-key: true
+ duration:
+ default: PT0S
+ description: How long it took a learner to complete the exercise, given as an ISO 8601 duration.
+ example: PT9.19S
+ type: string
+ extensions:
+ default: '{}'
+ description: JSON string containing a mapping of keys and values (usually
+ the local content ID, i.e. a versioning mechanism).
+ example: '{''http://h5p.org/x-api/h5p-local-content-id'':1}'
+ type: string
+ interaction_type:
+ default: ""
+ description: Exercise type.
+ example: choice
+ type: string
+ object_definition_description:
+ description: Exercise content, possibly including instructions.
+ example: |
+ Bestimme die Form von custodem im Satz: Urbs custodem non tyrannum, domus hospitem non expilatorem recepit.
+ type: string
+ object_definition_type:
+ default: ""
+ description: Type of object definition that is presented to the user.
+ example: http://adlnet.gov/expapi/activities/cmi.interaction
+ type: string
+ object_object_type:
+ default: ""
+ description: Type of object that is presented to the user.
+ example: Activity
+ type: string
+ response:
+ description: Answer provided by the user, possibly as a pattern.
+ example: His in rebus[,]sociis[,]civibus[,]rei publicae
+ type: string
+ score_max:
+ description: Maximum possible score to be achieved in this exercise.
+ example: 1
+ type: integer
+ score_min:
+ description: Minimum score to be achieved in this exercise.
+ example: 0
+ type: integer
+ score_raw:
+ description: Score that was actually achieved by the user in this exercise.
+ example: 1
+ type: integer
+ score_scaled:
+ default: 0
+ description: Relative score (between 0 and 1) that was actually achieved
+ by the user in this exercise.
+ example: 0.8889
+ format: float
+ type: number
+ success:
+ description: Whether the exercise was successfully completed or not.
+ example: true
+ type: boolean
+ verb_display:
+ default: ""
+ description: Type of action that was performed by the user.
+ example: answered
+ type: string
+ verb_id:
+ default: ""
+ description: Link to the type of action that was performed by the user.
+ example: http://adlnet.gov/expapi/verbs/answered
+ type: string
+ required:
+ - completion
+ - correct_responses_pattern
+ - created_time
+ - object_definition_description
+ - response
+ - score_max
+ - score_min
+ - score_raw
+ - success
+ type: object
+ x-tablename: LearningResult
+ Link:
+ example:
+ annis_component_name: dep
+ annis_component_type: Pointing
+ source: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_deprel: det
+ target: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok3
+ properties:
+ annis_component_name:
+ description: Component name as given by ANNIS.
+ example: dep
+ type: string
+ annis_component_type:
+ description: Component type as given by ANNIS.
+ example: Pointing
+ type: string
+ source:
+ description: ID of the source node for the edge.
+ example: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ type: string
+ target:
+ description: ID of the target node for the edge.
+ example: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok3
+ type: string
+ udep_deprel:
+ description: Dependency relation described by the edge.
+ example: det
+ type: string
+ required:
+ - annis_component_name
+ - annis_component_type
+ - source
+ - target
+ type: object
+ Node:
+ example:
+ annis_node_name: urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_xpostag: Ne
+ solution: solution
+ annis_tok: Galliae
+ annis_type: node
+ annis_node_type: node
+ udep_lemma: Gallia
+ udep_feats: Case=Nom|Gender=Fem|Number=Sing
+ id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ udep_upostag: PROPN
+ is_oov: true
+ properties:
+ annis_node_name:
+ description: Node name as given by ANNIS.
+ example: urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ type: string
+ annis_node_type:
+ description: Node type as given by ANNIS.
+ example: node
+ type: string
+ annis_tok:
+ description: Raw word form as given by ANNIS.
+ example: Galliae
+ type: string
+ annis_type:
+ description: Node type as given by ANNIS (?).
+ example: node
+ type: string
+ id:
+ description: Unique identifier for the node in the SALT model.
+ example: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok1
+ type: string
+ is_oov:
+ description: Whether the raw word form is missing in a given vocabulary.
+ example: true
+ type: boolean
+ udep_lemma:
+ description: Lemmatized word form.
+ example: Gallia
+ type: string
+ udep_upostag:
+ description: Universal part of speech tag for the word form.
+ example: PROPN
+ type: string
+ udep_xpostag:
+ description: Language-specific part of speech tag for the word form.
+ example: Ne
+ type: string
+ udep_feats:
+ description: Additional morphological information.
+ example: Case=Nom|Gender=Fem|Number=Sing
+ type: string
+ solution:
+ description: Solution value for this node in an exercise.
+ type: string
+ required:
+ - annis_node_name
+ - annis_node_type
+ - annis_tok
+ - annis_type
+ - id
+ - udep_lemma
+ - udep_upostag
+ type: object
+ Solution:
+ description: Correct solution for an exercise.
+ example:
+ value:
+ token_id: 9
+ sentence_id: 52548
+ content: unam
+ salt_id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok9
+ target:
+ token_id: 9
+ sentence_id: 52548
+ content: unam
+ salt_id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok9
+ properties:
+ target:
+ $ref: '#/components/schemas/SolutionElement'
+ value:
+ $ref: '#/components/schemas/SolutionElement'
+ type: object
+ SolutionElement:
+ description: Target or value of a correct solution for an exercise.
+ example:
+ token_id: 9
+ sentence_id: 52548
+ content: unam
+ salt_id: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok9
+ properties:
+ content:
+ description: Content of the solution element.
+ example: unam
+ type: string
+ salt_id:
+ description: Unique identifier for the node in the SALT model.
+ example: salt:/urn:custom:latinLit:proiel.caes-gal.lat:1.1.1/doc1#sent52548tok9
+ type: string
+ sentence_id:
+ description: Unique identifier for the sentence in a corpus.
+ example: 52548
+ type: integer
+ token_id:
+ description: Unique identifier for the token in a sentence.
+ example: 9
+ type: integer
+ required:
+ - content
+ - sentence_id
+ - token_id
+ type: object
+ TextComplexity:
+ description: Mapping of various elements of text complexity to their corresponding
+ values.
+ example:
+ all: 42.31
+ n_abl_abs: 1
+ n_clause: 1
+ n_part: 1
+ n_subclause: 1
+ n_punct: 1
+ n_w: 1
+ n_inf: 1
+ n_sent: 1
+ n_types: 1
+ avg_w_per_sent: 5.4
+ lex_den: 0.43
+ avg_w_len: 5.4
+ n_gerund: 1
+ pos: 1
+ properties:
+ all:
+ description: Overall text complexity of the given corpus.
+ example: 42.31
+ format: float
+ type: number
+ avg_w_len:
+ description: Average length of a word in the given corpus.
+ example: 5.4
+ format: float
+ type: number
+ avg_w_per_sent:
+ description: Average number of words per sentence.
+ example: 5.4
+ format: float
+ type: number
+ lex_den:
+ description: Lexical density of the given corpus.
+ example: 0.43
+ format: float
+ maximum: 1
+ minimum: 0
+ type: number
+ n_abl_abs:
+ description: Number of ablativi absoluti in the given corpus.
+ example: 1
+ type: integer
+ n_clause:
+ description: Number of clauses in the given corpus.
+ example: 1
+ type: integer
+ n_gerund:
+ description: Number of gerunds in the given corpus.
+ example: 1
+ type: integer
+ n_inf:
+ description: Number of infinitives in the given corpus.
+ example: 1
+ type: integer
+ n_part:
+ description: Number of participles in the given corpus.
+ example: 1
+ type: integer
+ n_punct:
+ description: Number of punctuation signs in the given corpus.
+ example: 1
+ type: integer
+ n_sent:
+ description: Number of sentences in the given corpus.
+ example: 1
+ type: integer
+ n_subclause:
+ description: Number of subclauses in the given corpus.
+ example: 1
+ type: integer
+ n_types:
+ description: Number of distinct word forms in the given corpus.
+ example: 1
+ type: integer
+ n_w:
+ description: Number of words in the given corpus.
+ example: 1
+ type: integer
+ pos:
+ description: Number of distinct part of speech tags in the given corpus.
+ example: 1
+ type: integer
+ type: object
+ UpdateInfo:
+ description: Timestamps for updates of various resources.
+ properties:
+ created_time:
+ description: When the resource was created (as POSIX timestamp).
+ example: 1234567.789
+ format: float
+ type: number
+ x-index: true
+ last_modified_time:
+ description: When the resource was last modified (as POSIX timestamp).
+ example: 1234567.789
+ format: float
+ type: number
+ x-index: true
+ resource_type:
+ description: Name of the resource for which update timestamps are indexed.
+ enum:
+ - cts_data
+ - exercise_list
+ - file_api_clean
+ example: cts_data
+ type: string
+ x-primary-key: true
+ required:
+ - created_time
+ - last_modified_time
+ - resource_type
+ type: object
+ x-tablename: UpdateInfo
+ AnnisResponse_frequency_analysis:
+ example:
+ values: []
+ count: 1
+ phenomena: []
+ properties:
+ count:
+ description: How often the given combination of values occurred.
+ example: 1
+ type: integer
+ phenomena:
+ description: Labels for the phenomena described in this frequency entry.
+ example: []
+ items:
+ type: string
+ type: array
+ values:
+ description: Values for the phenomena described in this frequency entry.
+ example: []
+ items:
+ type: string
+ type: array
+ Exercise_allOf:
+ description: Data for creating and evaluating interactive exercises.
+ properties:
+ conll:
+ default: ""
+ description: CoNLL-formatted linguistic annotations represented as a single
+ string.
+ example: \# newdoc id = ...\n# sent_id = 1\n# text = Caesar fortis est.\n1\tCaesar\tCaeso\tVERB
+ ...
+ nullable: false
+ type: string
+ eid:
+ description: Unique identifier (UUID) for the exercise.
+ example: 12345678-1234-5678-1234-567812345678
+ type: string
+ x-primary-key: true
+ exercise_type:
+ default: ""
+ description: Type of exercise, concerning interaction and layout.
+ example: markWords
+ nullable: false
+ type: string
+ exercise_type_translation:
+ default: ""
+ description: Localized expression of the exercise type.
+ example: Cloze
+ type: string
+ language:
+ default: de
+ description: ISO 639-1 Language Code for the localization of exercise content.
+ example: en
+ type: string
+ last_access_time:
+ description: When the exercise was last accessed (as POSIX timestamp).
+ example: 1234567.789
+ format: float
+ type: number
+ x-index: true
+ solutions:
+ default: '[]'
+ description: Correct solutions for the exercise.
+ example: '[{''target'': {''sentence_id'': 1, ''token_id'': 7, ''salt_id'':
+ ''salt:/urn:...'', ''content'': ''eo''}, ''value'': {''sentence_id'':
+ 0, ''token_id'': 0, ''content'': None, ''salt_id'': ''salt:/urn:...''}}]'
+ nullable: false
+ type: string
+ text_complexity:
+ default: 0
+ description: Overall text complexity as measured by the software's internal
+ language analysis.
+ example: 54.53
+ format: float
+ type: number
+ urn:
+ default: ""
+ description: CTS URN for the text passage from which the exercise was created.
+ example: urn:cts:latinLit:phi0448.phi001.perseus-lat2:1.1.1
+ nullable: false
+ type: string
+ required:
+ - eid
+ - last_access_time
diff --git a/mc_backend/openapi/openapi_server/test/__init__.py b/mc_backend/openapi/openapi_server/test/__init__.py
new file mode 100644
index 0000000..f3b3191
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/test/__init__.py
@@ -0,0 +1,16 @@
+import logging
+
+import connexion
+from flask_testing import TestCase
+
+from openapi.openapi_server.encoder import JSONEncoder
+
+
+class BaseTestCase(TestCase):
+
+ def create_app(self):
+ logging.getLogger('connexion.operation').setLevel('ERROR')
+ app = connexion.App(__name__, specification_dir='../openapi/')
+ app.app.json_encoder = JSONEncoder
+ app.add_api('openapi.yaml', pythonic_params=True)
+ return app.app
diff --git a/mc_backend/openapi/openapi_server/test/test_default_controller.py b/mc_backend/openapi/openapi_server/test/test_default_controller.py
new file mode 100644
index 0000000..f00cee0
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/test/test_default_controller.py
@@ -0,0 +1,123 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+import unittest
+
+from flask import json
+from six import BytesIO
+
+from openapi.openapi_server.models.corpus import Corpus # noqa: E501
+from openapi.openapi_server.models.exercise_base import ExerciseBase # noqa: E501
+from openapi.openapi_server.models.inline_response200 import InlineResponse200 # noqa: E501
+# UNKNOWN_BASE_TYPE is not referenced in these tests and no 'unknownbasetype' module is generated:
+# from openapi.openapi_server.models.unknownbasetype import UNKNOWN_BASE_TYPE  # noqa: E501
+from openapi.openapi_server.test import BaseTestCase
+
+
+class TestDefaultController(BaseTestCase):
+ """DefaultController integration test stubs"""
+
+ def test_mcserver_app_api_corpus_api_delete(self):
+ """Test case for mcserver_app_api_corpus_api_delete
+
+ Deletes a single corpus by ID.
+ """
+ headers = {
+ 'Accept': 'application/json',
+ }
+ response = self.client.open(
+ '/mc/api/v1.0/corpora/{cid}'.format(cid=1),
+ method='DELETE',
+ headers=headers)
+ self.assert200(response,
+ 'Response body is : ' + response.data.decode('utf-8'))
+
+ def test_mcserver_app_api_corpus_api_get(self):
+ """Test case for mcserver_app_api_corpus_api_get
+
+ Returns a single corpus by ID.
+ """
+ headers = {
+ 'Accept': 'application/json',
+ }
+ response = self.client.open(
+ '/mc/api/v1.0/corpora/{cid}'.format(cid=1),
+ method='GET',
+ headers=headers)
+ self.assert200(response,
+ 'Response body is : ' + response.data.decode('utf-8'))
+
+ def test_mcserver_app_api_corpus_api_patch(self):
+ """Test case for mcserver_app_api_corpus_api_patch
+
+ Updates a single corpus by ID.
+ """
+ query_string = [('author', 'Aulus Gellius'),
+ ('source_urn', 'urn:cts:latinLit:phi1254.phi001.perseus-lat2'),
+ ('title', 'Noctes Atticae')]
+ headers = {
+ 'Accept': 'application/json',
+ }
+ response = self.client.open(
+ '/mc/api/v1.0/corpora/{cid}'.format(cid=1),
+ method='PATCH',
+ headers=headers,
+ query_string=query_string)
+ self.assert200(response,
+ 'Response body is : ' + response.data.decode('utf-8'))
+
+ def test_mcserver_app_api_corpus_list_api_get(self):
+ """Test case for mcserver_app_api_corpus_list_api_get
+
+ Returns a list of corpora.
+ """
+ query_string = [('last_update_time', 123456789)]
+ headers = {
+ 'Accept': 'application/json',
+ }
+ response = self.client.open(
+ '/mc/api/v1.0/corpora',
+ method='GET',
+ headers=headers,
+ query_string=query_string)
+ self.assert200(response,
+ 'Response body is : ' + response.data.decode('utf-8'))
+
+ def test_mcserver_app_api_exercise_api_get(self):
+ """Test case for mcserver_app_api_exercise_api_get
+
+ Returns exercise data by ID.
+ """
+ query_string = [('eid', '12345678-1234-5678-1234-567812345678')]
+ headers = {
+ 'Accept': 'application/json',
+ }
+ response = self.client.open(
+ '/mc/api/v1.0/exercise',
+ method='GET',
+ headers=headers,
+ query_string=query_string)
+ self.assert200(response,
+ 'Response body is : ' + response.data.decode('utf-8'))
+
+ @unittest.skip("application/x-www-form-urlencoded not supported by Connexion")
+ def test_mcserver_app_api_exercise_api_post(self):
+ """Test case for mcserver_app_api_exercise_api_post
+
+ Creates a new exercise.
+ """
+ unknown_base_type = {}
+ headers = {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ }
+ response = self.client.open(
+ '/mc/api/v1.0/exercise',
+ method='POST',
+ headers=headers,
+ data=json.dumps(unknown_base_type),
+ content_type='application/x-www-form-urlencoded')
+ self.assert200(response,
+ 'Response body is : ' + response.data.decode('utf-8'))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/mc_backend/openapi/openapi_server/typing_utils.py b/mc_backend/openapi/openapi_server/typing_utils.py
new file mode 100644
index 0000000..0563f81
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/typing_utils.py
@@ -0,0 +1,32 @@
+# coding: utf-8
+
+import sys
+
+if sys.version_info < (3, 7):
+ import typing
+
+ def is_generic(klass):
+ """ Determine whether klass is a generic class """
+ return type(klass) == typing.GenericMeta
+
+ def is_dict(klass):
+ """ Determine whether klass is a Dict """
+ return klass.__extra__ == dict
+
+ def is_list(klass):
+ """ Determine whether klass is a List """
+ return klass.__extra__ == list
+
+else:
+
+ def is_generic(klass):
+ """ Determine whether klass is a generic class """
+ return hasattr(klass, '__origin__')
+
+ def is_dict(klass):
+ """ Determine whether klass is a Dict """
+ return klass.__origin__ == dict
+
+ def is_list(klass):
+ """ Determine whether klass is a List """
+ return klass.__origin__ == list
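+
+# Minimal usage sketch: is_generic(typing.List[str]) and is_list(typing.List[str])
+# return True, is_dict(typing.Dict[str, int]) returns True, and is_generic(str)
+# returns False.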
diff --git a/mc_backend/openapi/openapi_server/util.py b/mc_backend/openapi/openapi_server/util.py
new file mode 100644
index 0000000..8ce9198
--- /dev/null
+++ b/mc_backend/openapi/openapi_server/util.py
@@ -0,0 +1,142 @@
+import datetime
+
+import six
+import typing
+from openapi.openapi_server import typing_utils
+
+
+def _deserialize(data, klass):
+ """Deserializes dict, list, str into an object.
+
+ :param data: dict, list or str.
+ :param klass: class literal, or string of class name.
+
+ :return: object.
+ """
+ if data is None:
+ return None
+
+ if klass in six.integer_types or klass in (float, str, bool, bytearray):
+ return _deserialize_primitive(data, klass)
+ elif klass == object:
+ return _deserialize_object(data)
+ elif klass == datetime.date:
+ return deserialize_date(data)
+ elif klass == datetime.datetime:
+ return deserialize_datetime(data)
+ elif typing_utils.is_generic(klass):
+ if typing_utils.is_list(klass):
+ return _deserialize_list(data, klass.__args__[0])
+ if typing_utils.is_dict(klass):
+ return _deserialize_dict(data, klass.__args__[1])
+ else:
+ return deserialize_model(data, klass)
+
+
+def _deserialize_primitive(data, klass):
+ """Deserializes to primitive type.
+
+ :param data: data to deserialize.
+ :param klass: class literal.
+
+ :return: int, long, float, str, bool.
+ :rtype: int | long | float | str | bool
+ """
+ try:
+ value = klass(data)
+ except UnicodeEncodeError:
+ value = six.u(data)
+ except TypeError:
+ value = data
+ return value
+
+
+def _deserialize_object(value):
+ """Return an original value.
+
+ :return: object.
+ """
+ return value
+
+
+def deserialize_date(string):
+ """Deserializes string to date.
+
+ :param string: str.
+ :type string: str
+ :return: date.
+ :rtype: date
+ """
+ try:
+ from dateutil.parser import parse
+ return parse(string).date()
+ except ImportError:
+ return string
+
+
+def deserialize_datetime(string):
+ """Deserializes string to datetime.
+
+ The string should be in iso8601 datetime format.
+
+ :param string: str.
+ :type string: str
+ :return: datetime.
+ :rtype: datetime
+ """
+ try:
+ from dateutil.parser import parse
+ return parse(string)
+ except ImportError:
+ return string
+
+
+def deserialize_model(data, klass):
+ """Deserializes list or dict to model.
+
+ :param data: dict, list.
+ :type data: dict | list
+ :param klass: class literal.
+ :return: model object.
+ """
+ instance = klass()
+
+ if not instance.openapi_types:
+ return data
+
+ for attr, attr_type in six.iteritems(instance.openapi_types):
+ if data is not None \
+ and instance.attribute_map[attr] in data \
+ and isinstance(data, (list, dict)):
+ value = data[instance.attribute_map[attr]]
+ setattr(instance, attr, _deserialize(value, attr_type))
+
+ return instance
+
+
+def _deserialize_list(data, boxed_type):
+ """Deserializes a list and its elements.
+
+ :param data: list to deserialize.
+ :type data: list
+ :param boxed_type: class literal.
+
+ :return: deserialized list.
+ :rtype: list
+ """
+ return [_deserialize(sub_data, boxed_type)
+ for sub_data in data]
+
+
+def _deserialize_dict(data, boxed_type):
+ """Deserializes a dict and its elements.
+
+ :param data: dict to deserialize.
+ :type data: dict
+ :param boxed_type: class literal.
+
+ :return: deserialized dict.
+ :rtype: dict
+ """
+ return {k: _deserialize(v, boxed_type)
+ for k, v in six.iteritems(data)}
diff --git a/mc_backend/openapi/requirements.txt b/mc_backend/openapi/requirements.txt
new file mode 100644
index 0000000..72ed547
--- /dev/null
+++ b/mc_backend/openapi/requirements.txt
@@ -0,0 +1,10 @@
+connexion[swagger-ui] >= 2.6.0; python_version>="3.6"
+# 2.3 is the last version that supports python 3.4-3.5
+connexion[swagger-ui] <= 2.3.0; python_version=="3.5" or python_version=="3.4"
+# connexion requires werkzeug but connexion < 2.4.0 does not install werkzeug
+# we must peg werkzeug versions below to fix connexion
+# https://github.com/zalando/connexion/pull/1044
+werkzeug == 0.16.1; python_version=="3.5" or python_version=="3.4"
+swagger-ui-bundle >= 0.0.2
+python_dateutil >= 2.6.0
+setuptools >= 21.0.0
diff --git a/mc_backend/openapi/setup.py b/mc_backend/openapi/setup.py
new file mode 100644
index 0000000..acb8346
--- /dev/null
+++ b/mc_backend/openapi/setup.py
@@ -0,0 +1,39 @@
+# coding: utf-8
+
+import sys
+from setuptools import setup, find_packages
+
+NAME = "openapi_server"
+VERSION = "1.0.0"
+
+# To install the library, run the following
+#
+# python setup.py install
+#
+# prerequisite: setuptools
+# http://pypi.python.org/pypi/setuptools
+
+REQUIRES = [
+ "connexion>=2.0.2",
+ "swagger-ui-bundle>=0.0.2",
+ "python_dateutil>=2.6.0"
+]
+
+setup(
+ name=NAME,
+ version=VERSION,
+ description="Machina Callida Backend REST API",
+ author_email="",
+ url="",
+ keywords=["OpenAPI", "Machina Callida Backend REST API"],
+ install_requires=REQUIRES,
+ packages=find_packages(),
+ package_data={'': ['openapi/openapi.yaml']},
+ include_package_data=True,
+ entry_points={
+ 'console_scripts': ['openapi_server=openapi_server.__main__:main']},
+ long_description="""\
+ No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
+ """
+)
+
diff --git a/mc_backend/openapi/test-requirements.txt b/mc_backend/openapi/test-requirements.txt
new file mode 100644
index 0000000..0970f28
--- /dev/null
+++ b/mc_backend/openapi/test-requirements.txt
@@ -0,0 +1,4 @@
+pytest~=4.6.7 # needed for python 2.7+3.4
+pytest-cov>=2.8.1
+pytest-randomly==1.2.3 # needed for python 2.7+3.4
+Flask-Testing==0.8.0
diff --git a/mc_backend/openapi/tox.ini b/mc_backend/openapi/tox.ini
new file mode 100644
index 0000000..f66b2d8
--- /dev/null
+++ b/mc_backend/openapi/tox.ini
@@ -0,0 +1,11 @@
+[tox]
+envlist = py3
+skipsdist=True
+
+[testenv]
+deps=-r{toxinidir}/requirements.txt
+ -r{toxinidir}/test-requirements.txt
+ {toxinidir}
+
+commands=
+ pytest --cov=openapi_server
diff --git a/mc_backend/openapi_generator.py b/mc_backend/openapi_generator.py
new file mode 100644
index 0000000..ff06801
--- /dev/null
+++ b/mc_backend/openapi_generator.py
@@ -0,0 +1,15 @@
+"""OpenAPI Generator creates models that can only be used as top-level package, not as submodule.
+Therefore, if we still want to use them from a subdirectory, we need to adjust the imports manually."""
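+# For instance, a generated import such as
+#     from openapi_server.models.corpus import Corpus
+# is rewritten by the loop below to
+#     from openapi.openapi_server.models.corpus import Corpus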
+import glob
+import os
+
+openapi_dir: str = "openapi"
+for file_path in glob.iglob(os.path.join(openapi_dir, "**/*"), recursive=True):
+ if not os.path.isdir(file_path) and file_path[-3:] == ".py":
+ content: str
+ with open(file_path) as f:
+ content = f.read()
+ content = content.replace("from openapi_server", f"from {openapi_dir}.openapi_server")
+ content = content.replace("import openapi_server", f"import {openapi_dir}.openapi_server")
+ with open(file_path, "w+") as f2:
+ f2.write(content)
diff --git a/mc_backend/tests.py b/mc_backend/tests.py
index 9f0e8d8..c24331c 100644
--- a/mc_backend/tests.py
+++ b/mc_backend/tests.py
@@ -1,6 +1,5 @@
"""Unit tests for testing the application functionality."""
import copy
-import logging
import ntpath
import os
import uuid
@@ -23,7 +22,6 @@ from typing import Dict, List, Tuple, Type, Any
from conllu import TokenList
from flask import Flask
-from flask.testing import FlaskClient
from gensim.models import Word2Vec
from lxml import etree
from networkx import MultiDiGraph, Graph
@@ -39,8 +37,9 @@ from mcserver.app import create_app, db, start_updater, full_init
from mcserver.app.api.exerciseAPI import map_exercise_data_to_database
from mcserver.app.models import ResourceType, FileType, ExerciseType, ExerciseData, \
NodeMC, LinkMC, GraphData, Phenomenon, CustomCorpus, AnnisResponse, Solution, DownloadableFile, Language, \
- VocabularyCorpus, SolutionElement, TextComplexityMeasure, FrequencyAnalysis, CitationLevel, FrequencyItem, \
- TextComplexity, Dependency, PartOfSpeech, Choice, XapiStatement, ExerciseMC, CorpusMC
+ VocabularyCorpus, TextComplexityMeasure, FrequencyAnalysis, CitationLevel, FrequencyItem, \
+ TextComplexity, Dependency, PartOfSpeech, Choice, XapiStatement, ExerciseMC, CorpusMC, \
+ make_solution_element_from_salt_id
from mcserver.app.services import AnnotationService, CorpusService, FileService, CustomCorpusService, DatabaseService, \
XMLservice, TextService
from mcserver.config import TestingConfig, Config
@@ -52,40 +51,21 @@ class McTestCase(unittest.TestCase):
"""The test suite for the main application."""
def mocked_requests_get(*args, **kwargs):
- if TestingConfig.SIMULATE_CORPUS_NOT_FOUND:
- return MockResponse(json.dumps(AnnisResponse().__dict__))
- elif TestingConfig.SIMULATE_HTTP_ERROR:
+ if TestingConfig.SIMULATE_HTTP_ERROR:
raise HTTPError
- elif TestingConfig.SIMULATE_EMPTY_GRAPH:
- graph_data_raw: dict = dict(Mocks.annis_response_dict["graph_data_raw"])
- graph_data_raw["nodes"] = []
- return MockResponse(json.dumps(graph_data_raw))
- url: str = args[0]
- if url == Config.CTS_API_BASE_URL:
- if kwargs['params']['request'] == 'GetCapabilities':
- return MockResponse(Mocks.cts_capabilities_xml)
- return MockResponse(Mocks.cts_reff_xml)
- elif url.endswith(Config.SERVER_URI_CSM_SUBGRAPH):
- return MockResponse(json.dumps(Mocks.annis_response_dict))
else:
- gd: GraphData = AnnotationService.map_graph_data(Mocks.annis_response_dict["graph_data_raw"])
- return MockResponse(json.dumps(gd.serialize()))
+ url: str = args[0]
+ if url == Config.CTS_API_BASE_URL:
+ if kwargs['params']['request'] == 'GetCapabilities':
+ return MockResponse(Mocks.cts_capabilities_xml)
+ return MockResponse(Mocks.cts_reff_xml)
+ elif url.endswith(Config.SERVER_URI_CSM_SUBGRAPH):
+ return MockResponse(json.dumps(Mocks.annis_response_dict))
def mocked_requests_post(*args, **kwargs):
url: str = args[0]
if url.endswith(Config.SERVER_URI_TEXT_COMPLEXITY):
return MockResponse(Mocks.text_complexity_json_string)
- else:
- ed1: ExerciseData = AnnotationService.map_graph_data_to_exercise(
- Mocks.annis_response_dict["graph_data_raw"],
- "", [Solution(target=SolutionElement(
- salt_id='salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent159692tok1'))])
- ed2: ExerciseData = AnnotationService.map_graph_data_to_exercise(
- Mocks.annis_response_dict["graph_data_raw"],
- "", [Solution(target=SolutionElement(
- salt_id='salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent159695tok10'))])
- ed2.graph.nodes = ed2.graph.nodes[42:]
- return MockResponse(json.dumps([ed1.serialize(), ed2.serialize()]))
def setUp(self):
"""Initializes the testing environment."""
@@ -199,6 +179,7 @@ class McTestCase(unittest.TestCase):
def test_api_exercise_get(self):
""" Retrieves an existing exercise by its exercise ID. """
+ db.session.query(Exercise).delete()
response: Response = Mocks.app_dict[self.class_name].client.get(Config.SERVER_URI_EXERCISE,
query_string=dict(eid=""))
self.assertEqual(response.status_code, 404)
@@ -206,7 +187,8 @@ class McTestCase(unittest.TestCase):
Mocks.exercise.urn = ""
db.session.add(Mocks.exercise)
db.session.commit()
- with patch.object(CorpusService, "get_corpus", side_effect=[AnnisResponse(), Mocks.annis_response]):
+ ar: AnnisResponse = AnnisResponse(solutions=[], graph_data=GraphData(links=[], nodes=[]))
+ with patch.object(CorpusService, "get_corpus", side_effect=[ar, Mocks.annis_response]):
response = Mocks.app_dict[self.class_name].client.get(Config.SERVER_URI_EXERCISE,
query_string=dict(eid=Mocks.exercise.eid))
self.assertEqual(response.status_code, 404)
@@ -214,12 +196,44 @@ class McTestCase(unittest.TestCase):
db.session.commit()
response = Mocks.app_dict[self.class_name].client.get(Config.SERVER_URI_EXERCISE,
query_string=dict(eid=Mocks.exercise.eid))
- graph_dict: dict = json.loads(response.data.decode("utf-8"))
- ar: AnnisResponse = AnnisResponse(json_dict=graph_dict)
- self.assertEqual(len(ar.nodes), 52)
+ graph_dict: dict = json.loads(response.get_data(as_text=True))
+ ar: AnnisResponse = AnnisResponse.from_dict(graph_dict)
+ self.assertEqual(len(ar.graph_data.nodes), 52)
db.session.query(Exercise).delete()
session.make_transient(Mocks.exercise)
+ def test_api_exercise_post(self):
+ """ Creates a new exercise from scratch. """
+
+ def post_response(*args, **kwargs):
+ url: str = args[0]
+ if url.endswith("/"):
+ return MockResponse("}{")
+ elif url.endswith(str(Config.CORPUS_STORAGE_MANAGER_PORT)):
+ return MockResponse(json.dumps(Mocks.annis_response_dict))
+ else:
+ return MockResponse(Mocks.text_complexity_json_string)
+
+ db.session.query(UpdateInfo).delete()
+ ui_exercises: UpdateInfo = UpdateInfo.from_dict(resource_type=ResourceType.exercise_list.name,
+ last_modified_time=1, created_time=1)
+ db.session.add(ui_exercises)
+ db.session.commit()
+ data_dict: dict = dict(urn=Mocks.exercise.urn, type=ExerciseType.matching.value,
+ search_values=Mocks.exercise.search_values, instructions='abc')
+ with patch.object(mcserver.app.api.exerciseAPI.requests, "post", side_effect=post_response):
+ response: Response = Mocks.app_dict[self.class_name].client.post(
+ Config.SERVER_URI_EXERCISE, headers=Mocks.headers_form_data, data=data_dict)
+ ar: AnnisResponse = AnnisResponse.from_dict(json.loads(response.get_data(as_text=True)))
+ self.assertEqual(len(ar.solutions), 3)
+ Config.CORPUS_STORAGE_MANAGER_PORT = f"{Config.CORPUS_STORAGE_MANAGER_PORT}/"
+ response: Response = Mocks.app_dict[self.class_name].client.post(
+ Config.SERVER_URI_EXERCISE, headers=Mocks.headers_form_data, data=data_dict)
+ self.assertEqual(response.status_code, 500)
+ Config.CORPUS_STORAGE_MANAGER_PORT = int(Config.CORPUS_STORAGE_MANAGER_PORT[:-1])
+ Mocks.app_dict[self.class_name].app_context.push()
+ db.session.query(UpdateInfo).delete()
+
def test_api_exercise_list_get(self):
""" Retrieves a list of available exercises. """
ui_exercises: UpdateInfo = UpdateInfo.from_dict(resource_type=ResourceType.exercise_list.name,
@@ -330,13 +344,23 @@ class McTestCase(unittest.TestCase):
db.session.query(Exercise).delete()
session.make_transient(Mocks.exercise)
- @patch('mcserver.app.api.kwicAPI.requests.post', side_effect=mocked_requests_post)
- def test_api_kwic_post(self, mock_post: MagicMock):
+ def test_api_kwic_post(self):
""" Posts an AQL query to create a KWIC visualization in SVG format. """
+ ed1: ExerciseData = AnnotationService.map_graph_data_to_exercise(
+ Mocks.annis_response_dict["graph_data_raw"],
+ "", [Solution(target=make_solution_element_from_salt_id(
+ 'salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent159692tok1'))])
+ ed2: ExerciseData = AnnotationService.map_graph_data_to_exercise(
+ Mocks.annis_response_dict["graph_data_raw"],
+ "", [Solution(target=make_solution_element_from_salt_id(
+ 'salt:/urn:custom:latinLit:proiel.pal-agr.lat:1.1.1/doc1#sent159695tok10'))])
+ ed2.graph.nodes = ed2.graph.nodes[42:]
+ mr: MockResponse = MockResponse(json.dumps([ed1.serialize(), ed2.serialize()]))
data_dict: dict = dict(search_values=Mocks.exercise.search_values, urn=Mocks.urn_custom)
- response: Response = Mocks.app_dict[self.class_name].client.post(
- TestingConfig.SERVER_URI_KWIC, headers=Mocks.headers_form_data, data=data_dict)
- self.assertTrue(response.data.startswith(Mocks.kwic_svg))
+ with patch.object(mcserver.app.services.corpusService.requests, "post", return_value=mr):
+ response: Response = Mocks.app_dict[self.class_name].client.post(
+ TestingConfig.SERVER_URI_KWIC, headers=Mocks.headers_form_data, data=data_dict)
+ self.assertTrue(response.data.startswith(Mocks.kwic_svg))
def test_api_not_found(self):
"""Checks the 404 response in case of an invalid API query URL."""
@@ -344,22 +368,25 @@ class McTestCase(unittest.TestCase):
self.assertEqual(response.status_code, 404)
@patch('mcserver.app.services.textComplexityService.requests.post', side_effect=mocked_requests_post)
- @patch('mcserver.app.services.corpusService.requests.get', side_effect=mocked_requests_get)
- def test_api_raw_text_get(self, mock_post_tcs: MagicMock, mock_get_cs: MagicMock):
+ def test_api_raw_text_get(self, mock_post_tcs: MagicMock):
""" Retrieves the raw text for a given URN. """
- TestingConfig.SIMULATE_CORPUS_NOT_FOUND = True
- response: Response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_RAW_TEXT,
- query_string=dict(urn=Mocks.urn_custom))
- self.assertEqual(response.status_code, 404)
- TestingConfig.SIMULATE_CORPUS_NOT_FOUND = False
- response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_RAW_TEXT,
- query_string=dict(urn=Mocks.urn_custom))
- self.assertEqual(len(json.loads(response.data.decode("utf-8"))["nodes"]), 52)
- TestingConfig.SIMULATE_EMPTY_GRAPH = True
- response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_RAW_TEXT,
- query_string=dict(urn=Mocks.urn_custom))
- self.assertEqual(response.status_code, 404)
- TestingConfig.SIMULATE_EMPTY_GRAPH = False
+ with patch.object(mcserver.app.services.corpusService.requests, "get") as mock_get_cs:
+ mock_get_cs.return_value = MockResponse(
+ json.dumps(AnnisResponse(graph_data=GraphData(links=[], nodes=[]), solutions=[]).to_dict()))
+ response: Response = Mocks.app_dict[self.class_name].client.get(
+ TestingConfig.SERVER_URI_RAW_TEXT, query_string=dict(urn=Mocks.urn_custom))
+ self.assertEqual(response.status_code, 404)
+ mock_get_cs.return_value = MockResponse(json.dumps(Mocks.graph_data.to_dict()))
+ response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_RAW_TEXT,
+ query_string=dict(urn=Mocks.urn_custom))
+ ar: AnnisResponse = AnnisResponse.from_dict(json.loads(response.get_data(as_text=True)))
+ self.assertEqual(len(ar.graph_data.nodes), 52)
+ graph_data_raw: dict = dict(Mocks.annis_response_dict["graph_data_raw"])
+ graph_data_raw["nodes"] = []
+ mock_get_cs.return_value = MockResponse(json.dumps(graph_data_raw))
+ response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_RAW_TEXT,
+ query_string=dict(urn=Mocks.urn_custom))
+ self.assertEqual(response.status_code, 404)
def test_api_static_exercises_get(self):
""" Retrieves static exercises from the frontend and publishes deep URLs for each one of them. """
@@ -402,18 +429,19 @@ class McTestCase(unittest.TestCase):
ar: AnnisResponse = CorpusService.get_subgraph(Mocks.urn_custom, 'tok="quarum"', 0, 0, False)
self.assertEqual(len(ar.solutions), 3)
- @patch('mcserver.app.services.corpusService.requests.get', side_effect=mocked_requests_get)
@patch('mcserver.app.services.textComplexityService.requests.post', side_effect=mocked_requests_post)
- def test_api_text_complexity_get(self, mock_get: MagicMock, mock_post: MagicMock):
+ def test_api_text_complexity_get(self, mock_post: MagicMock):
""" Calculates text complexity measures for a given URN. """
- args: dict = dict(urn=Mocks.urn_custom, measure=TextComplexityMeasure.all.name)
- response: Response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_TEXT_COMPLEXITY,
- query_string=args)
- self.assertEqual(response.data.decode("utf-8"), Mocks.text_complexity_json_string)
- args["measure"] = "n_w"
- response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_TEXT_COMPLEXITY,
- query_string=args)
- self.assertEqual(json.loads(response.data.decode("utf-8"))["n_w"], 52)
+ with patch.object(mcserver.app.services.corpusService.requests, "get",
+ return_value=MockResponse(json.dumps(Mocks.graph_data.to_dict()))):
+ args: dict = dict(urn=Mocks.urn_custom, measure=TextComplexityMeasure.all.name)
+ response: Response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_TEXT_COMPLEXITY,
+ query_string=args)
+ self.assertEqual(response.get_data(as_text=True), Mocks.text_complexity_json_string)
+ args["measure"] = "n_w"
+ response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_TEXT_COMPLEXITY,
+ query_string=args)
+ self.assertEqual(json.loads(response.get_data(as_text=True))["n_w"], 52)
@patch('MyCapytain.retrievers.cts5.requests.get', side_effect=mocked_requests_get)
def test_api_valid_reff_get(self, mock_get: MagicMock): #
@@ -458,20 +486,22 @@ class McTestCase(unittest.TestCase):
headers=Mocks.headers_form_data, data=data_dict)
self.assertEqual(len(json.loads(response.data.decode("utf-8"))), 2)
- @patch('mcserver.app.services.corpusService.requests.get', side_effect=mocked_requests_get)
@patch('mcserver.app.services.textComplexityService.requests.post', side_effect=mocked_requests_post)
- def test_api_vocabulary_get(self, mock_get: MagicMock, mock_post: MagicMock):
+ def test_api_vocabulary_get(self, mock_post: MagicMock):
""" Calculates lexical overlap between a text (specified by URN) and a static vocabulary. """
- args: dict = dict(query_urn=Mocks.urn_custom, show_oov=True, vocabulary=VocabularyCorpus.agldt.name,
- frequency_upper_bound=500)
- response: Response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_VOCABULARY,
- query_string=args)
- ar: AnnisResponse = AnnisResponse(json_dict=json.loads(response.data.decode("utf-8")))
- self.assertTrue(NodeMC(json_dict=ar.nodes[3]).is_oov)
- args["show_oov"] = False
- args["frequency_upper_bound"] = 6000
- response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_VOCABULARY, query_string=args)
- self.assertEqual(json.loads(response.data.decode("utf-8"))[0]["matching_degree"], 90.9090909090909)
+ with patch.object(mcserver.app.services.corpusService.requests, "get",
+ return_value=MockResponse(json.dumps(Mocks.graph_data.to_dict()))):
+ args: dict = dict(query_urn=Mocks.urn_custom, show_oov=True, vocabulary=VocabularyCorpus.agldt.name,
+ frequency_upper_bound=500)
+ response: Response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_VOCABULARY,
+ query_string=args)
+ ar: AnnisResponse = AnnisResponse.from_dict(json.loads(response.get_data(as_text=True)))
+ self.assertTrue(NodeMC.from_dict(ar.graph_data.nodes[3].to_dict()).is_oov)
+ args["show_oov"] = False
+ args["frequency_upper_bound"] = 6000
+ response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_VOCABULARY,
+ query_string=args)
+ self.assertEqual(json.loads(response.data.decode("utf-8"))[0]["matching_degree"], 90.9090909090909)
def test_app_init(self):
"""Creates a CSM app in testing mode."""
@@ -536,7 +566,7 @@ class McTestCase(unittest.TestCase):
db.session.commit()
exercise_expected: Exercise = Mocks.exercise
exercise: Exercise = map_exercise_data_to_database(
- solutions=[Solution(json_dict=x) for x in json.loads(exercise_expected.solutions)],
+ solutions=[Solution.from_dict(x) for x in json.loads(exercise_expected.solutions)],
exercise_data=Mocks.exercise_data, instructions=exercise_expected.instructions,
exercise_type=exercise_expected.exercise_type,
exercise_type_translation=exercise_expected.exercise_type_translation, xml_guid=exercise_expected.eid,
@@ -583,27 +613,9 @@ class McTestCase(unittest.TestCase):
class CsmTestCase(unittest.TestCase):
"""The test suite for the Corpus Storage Manager application."""
- @staticmethod
- def set_up_mcserver() -> FlaskClient:
- app: Flask = create_app(TestingConfig)
- app.logger.setLevel(logging.CRITICAL)
- app.testing = True
- return app.test_client()
-
- def mocked_requests_post(*args, **kwargs):
- url: str = args[0]
- if url.endswith(Config.SERVER_URI_TEXT_COMPLEXITY):
- return MockResponse(Mocks.text_complexity_json_string)
- elif url[-1] == '/':
- return MockResponse("}{")
- else:
- return MockResponse(json.dumps(Mocks.annis_response_dict))
-
def setUp(self):
"""Initializes the testing environment."""
self.start_time = time.time()
- if os.path.exists(Config.GRAPH_DATABASE_DIR):
- shutil.rmtree(Config.GRAPH_DATABASE_DIR)
self.class_name: str = str(self.__class__)
TestHelper.update_flask_app(self.class_name, create_csm_app)
@@ -632,43 +644,21 @@ class CsmTestCase(unittest.TestCase):
def test_api_csm_get(self):
"""Gets the raw text for a specific URN."""
- ret_vals: List[AnnisResponse] = [AnnisResponse(), Mocks.annis_response]
+ ret_vals: List[AnnisResponse] = [
+ AnnisResponse(graph_data=GraphData(links=[], nodes=[])), Mocks.annis_response]
with patch.object(CorpusService, "get_corpus", side_effect=ret_vals):
response: Response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_CSM,
query_string=dict(urn=Mocks.urn[:5]))
self.assertEqual(response.status_code, 404)
response: Response = Mocks.app_dict[self.class_name].client.get(TestingConfig.SERVER_URI_CSM,
query_string=dict(urn=Mocks.urn_custom))
- graph_data_raw: dict = json.loads(response.get_data().decode("utf-8"))
- graph_data: GraphData = GraphData(json_dict=graph_data_raw)
- text_raw = " ".join(x.annis_tok for x in graph_data.nodes)
+ ar: AnnisResponse = AnnisResponse.from_dict(json.loads(response.get_data(as_text=True)))
+ text_raw = " ".join(x.annis_tok for x in ar.graph_data.nodes)
# remove the spaces before punctuation because, otherwise, the parser won't work correctly
received_text: str = re.sub('[ ]([{0}])'.format(string.punctuation), r'\1', text_raw)
expected_text: str = "Pars est prima prudentiae ipsam cui praecepturus es aestimare personam."
self.assertIn(expected_text, received_text)
- @patch('mcserver.app.services.corpusService.requests.post', side_effect=mocked_requests_post)
- @patch('mcserver.app.services.textComplexityService.requests.post', side_effect=mocked_requests_post)
- def test_api_exercise_post(self, mock_post_cs: MagicMock, mock_post_tcs: MagicMock):
- """ Creates a new exercise from scratch. """
- db.session.query(UpdateInfo).delete()
- ui_exercises: UpdateInfo = UpdateInfo.from_dict(resource_type=ResourceType.exercise_list.name,
- last_modified_time=1, created_time=1)
- db.session.add(ui_exercises)
- db.session.commit()
- client: FlaskClient = CsmTestCase.set_up_mcserver()
- data_dict: dict = dict(urn=Mocks.exercise.urn, type=ExerciseType.matching.value,
- search_values=Mocks.exercise.search_values, instructions='abc')
- response: Response = client.post(Config.SERVER_URI_EXERCISE, headers=Mocks.headers_form_data, data=data_dict)
- ar: AnnisResponse = AnnisResponse(json_dict=json.loads(response.data.decode("utf-8")))
- self.assertEqual(len(ar.solutions), 3)
- Config.CORPUS_STORAGE_MANAGER_PORT = f"{Config.CORPUS_STORAGE_MANAGER_PORT}/"
- response: Response = client.post(Config.SERVER_URI_EXERCISE, headers=Mocks.headers_form_data, data=data_dict)
- self.assertEqual(response.status_code, 500)
- Config.CORPUS_STORAGE_MANAGER_PORT = int(Config.CORPUS_STORAGE_MANAGER_PORT[:-1])
- Mocks.app_dict[self.class_name].app_context.push()
- db.session.query(UpdateInfo).delete()
-
def test_api_frequency_get(self):
""" Requests a frequency analysis for a given URN. """
expected_fa: FrequencyAnalysis = FrequencyAnalysis()
@@ -707,17 +697,18 @@ class CsmTestCase(unittest.TestCase):
args: dict = dict(urn=Mocks.urn_custom, measure=TextComplexityMeasure.all.name)
response: Response = Mocks.app_dict[self.class_name].client.post(TestingConfig.SERVER_URI_TEXT_COMPLEXITY,
data=json.dumps(args))
- tc: TextComplexity = TextComplexity(json_dict=json.loads(response.data.decode("utf-8")))
+ tc: TextComplexity = TextComplexity.from_dict(json.loads(response.get_data(as_text=True)))
self.assertEqual(tc.pos, 5)
args["measure"] = "n_w"
response = Mocks.app_dict[self.class_name].client.post(TestingConfig.SERVER_URI_TEXT_COMPLEXITY,
data=json.dumps(args))
- tc = TextComplexity(json_dict=json.loads(response.data.decode("utf-8")))
+ tc = TextComplexity.from_dict(json.loads(response.get_data(as_text=True)))
self.assertEqual(tc.n_w, 6)
@patch('mcserver.app.services.corpusService.CorpusService.update_corpora')
def test_check_corpus_list_age(self, mock_update: MagicMock):
"""Checks whether the list of available corpora needs to be updated."""
+ db.session.query(UpdateInfo).delete()
ui_cts: UpdateInfo = UpdateInfo.from_dict(resource_type=ResourceType.cts_data.name,
last_modified_time=1, created_time=1)
db.session.add(ui_cts)
@@ -839,6 +830,7 @@ class CsmTestCase(unittest.TestCase):
with patch.object(csm, "get_app") as mock_get_app:
csm.run_app()
self.assertEqual(mock_get_app.call_count, 1)
+ Mocks.app_dict[self.class_name].app_context.push()
class CommonTestCase(unittest.TestCase):
@@ -914,7 +906,7 @@ class CommonTestCase(unittest.TestCase):
def test_get_corpus(self):
""" Loads the text for a standard corpus from the CTS API or cache. """
ar: AnnisResponse = CorpusService.get_corpus("", True)
- self.assertEqual(len(ar.nodes), 0)
+ self.assertEqual(len(ar.graph_data.nodes), 0)
def test_get_custom_corpus_annotations(self):
""" Retrieves the annotated text for a custom non-PROIEL corpus, e.g. a textbook. """
@@ -948,7 +940,7 @@ class CommonTestCase(unittest.TestCase):
def test_get_pdf_html_string(self):
""" Builds an HTML string from an exercise, e.g. to construct a PDF from it. """
Mocks.exercise.exercise_type = ExerciseType.matching.value
- solutions: List[Solution] = [Solution(json_dict=x) for x in json.loads(Mocks.exercise.solutions)]
+ solutions: List[Solution] = [Solution.from_dict(x) for x in json.loads(Mocks.exercise.solutions)]
result: str = FileService.get_pdf_html_string(Mocks.exercise, Mocks.annotations, FileType.pdf, solutions)
self.assertEqual(result, '
:
')
Mocks.exercise.exercise_type = ExerciseType.markWords.value
@@ -1009,8 +1001,9 @@ class CommonTestCase(unittest.TestCase):
self.assertEqual(len(cc.text_parts), 1)
def test_init_db_alembic(self):
- """ In Docker, the alembic version is not initially written to the database, so we need to set it manually. """
- db.engine.execute(f"DROP TABLE {Config.DATABASE_TABLE_ALEMBIC}")
+ """In Docker, the alembic version is not initially written to the database, so we need to set it manually."""
+ if db.engine.dialect.has_table(db.engine, Config.DATABASE_TABLE_ALEMBIC):
+ db.engine.execute(f"DROP TABLE {Config.DATABASE_TABLE_ALEMBIC}")
self.assertEqual(db.engine.dialect.has_table(db.engine, Config.DATABASE_TABLE_ALEMBIC), False)
DatabaseService.init_db_alembic()
self.assertEqual(db.engine.dialect.has_table(db.engine, Config.DATABASE_TABLE_ALEMBIC), True)
@@ -1064,7 +1057,7 @@ class CommonTestCase(unittest.TestCase):
def test_make_docx_file(self):
""" Saves an exercise to a DOCX file (e.g. for later download). """
file_path: str = os.path.join(Config.TMP_DIRECTORY, "make_docx_file.docx")
- solutions: List[Solution] = [Solution(json_dict=x) for x in json.loads(Mocks.exercise.solutions)]
+ solutions: List[Solution] = [Solution.from_dict(x) for x in json.loads(Mocks.exercise.solutions)]
FileService.make_docx_file(Mocks.exercise, file_path, Mocks.annotations, FileType.docx, solutions)
self.assertEqual(os.path.getsize(file_path), 36611)
Mocks.exercise.exercise_type = ExerciseType.markWords.value
@@ -1101,13 +1094,14 @@ class CommonTestCase(unittest.TestCase):
node_expected: NodeMC = ed_expected.graph.nodes[0]
node = {"id": node_expected.id, "annis::node_name": node_expected.annis_node_name,
"annis::node_type": node_expected.annis_node_type, "annis::tok": node_expected.annis_tok,
- "annis::type": node_expected.annis_type, "udep::lemma": node_expected.udep_lemma,
- "udep::upostag": node_expected.udep_upostag, "udep::xpostag": node_expected.udep_xpostag}
+ "annis::type": node_expected.annis_type, "udep::feats": node_expected.udep_feats,
+ "udep::lemma": node_expected.udep_lemma, "udep::upostag": node_expected.udep_upostag,
+ "udep::xpostag": node_expected.udep_xpostag}
link_expected: LinkMC = ed_expected.graph.links[0]
link = {"source": link_expected.source, "target": link_expected.target,
"annis::component_name": link_expected.annis_component_name,
"annis::component_type": link_expected.annis_component_type, "udep::deprel": link_expected.udep_deprel}
- graph_data_raw: Dict = dict(directed=ed_expected.graph.directed, graph=ed_expected.graph.graph,
+ graph_data_raw: dict = dict(directed=ed_expected.graph.directed, graph=ed_expected.graph.graph,
multigraph=ed_expected.graph.multigraph, links=[link], nodes=[node])
gd: GraphData = AnnotationService.map_graph_data(graph_data_raw=graph_data_raw)
self.assertEqual(gd.graph, ed_expected.graph.graph)
@@ -1125,11 +1119,10 @@ class CommonTestCase(unittest.TestCase):
last_modified_time=1)
self.assertTrue(ui.__repr__().startswith("