Commit 123ac1df authored by Konstantin Schulz

API documentation is now available at ... /mc/api/v1.0/ui/

parent 90b6557c
Pipeline #11569 passed with stages in 7 minutes and 57 seconds
@@ -13,7 +13,6 @@ services:
- IS_THIS_A_DOCKER_CONTAINER=Yes
- PYTHONPATH=/home/mc
ports:
- "8122:22"
- "6555:6555"
restart: always
stdin_open: true
@@ -21,6 +20,8 @@ services:
image: postgres
environment:
- POSTGRES_HOST_AUTH_METHOD=trust
# ports:
# - "5432:5432"
restart: always
volumes:
- db-data:/var/lib/postgresql/data
@@ -47,7 +48,6 @@ services:
- IS_THIS_A_DOCKER_CONTAINER=Yes
- PYTHONPATH=/home/mc
ports:
- "8022:22"
- "5000:5000"
restart: always
stdin_open: true
@@ -6,6 +6,8 @@ cover_pylib = False
omit =
*/site-packages/*
*/migrations/*
# cannot run tests for files that are generated and updated automatically
*/models_auto.py
parallel = True
[report]
@@ -34,13 +34,14 @@ Or combine both commands in one line: `pip list -o --format=freeze | grep -v '^\
----------------------------------------------------------------
# Database
To autogenerate a new migration script, start the Docker container with the database and run: `flask db migrate`.
To migrate the database to a newer version manually, run: `flask db upgrade`
To migrate the database to an older version manually, run: `flask db downgrade`
If it does nothing or fails, make sure that the environment variable FLASK_APP is set correctly (see https://flask.palletsprojects.com/en/1.1.x/cli/).
To autogenerate a new migration script:
1. Start the Docker container with the database: `docker-compose run -p 5432:5432 -d db`
2. Create a new migration: `flask db migrate`.
3. Perform a migration...
- ... to a newer version: `flask db upgrade`.
- ... to an older version: `flask db downgrade`.
- If it does nothing or fails, make sure that the environment variable FLASK_APP is set correctly (see https://flask.palletsprojects.com/en/1.1.x/cli/): `export FLASK_APP=app.py`
4. To finish the process, shut down the database container: `docker-compose down` (the full sequence is sketched below).
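Taken together, a minimal sketch of the whole workflow (assuming the database service is named `db`, as in `docker-compose.yml`, and the Flask entry point is `app.py`):

```bash
# 1. start only the database service and expose it on the default Postgres port
docker-compose run -p 5432:5432 -d db
# make sure Flask picks up the right application
export FLASK_APP=app.py
# 2. autogenerate a new migration script from the current models
flask db migrate
# 3. apply it to the database (use `flask db downgrade` to revert instead)
flask db upgrade
# 4. shut the database container down again
docker-compose down
```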
----------------------------------------------------------------
from flask import Flask
from mcserver import get_app, get_cfg
app: Flask = get_app()
@@ -7,7 +7,7 @@ from mcserver import Config
from mcserver.app import init_app_common, init_logging
def create_csm_app(cfg: Type[Config] = Config):
def create_csm_app(cfg: Type[Config] = Config) -> Flask:
"""Creates a new Flask app that represents a Corpus Storage Manager."""
Config.CORPUS_STORAGE_MANAGER = CorpusStorageManager(Config.GRAPH_DATABASE_DIR)
@@ -4,9 +4,10 @@ It is a server-side backend for retrieving Latin texts and
generating language exercises for them."""
import sys
from typing import Type
from flask import Flask
from mcserver.app import create_app
from mcserver.config import Config, ProductionConfig, TestingConfig, DevelopmentConfig
from mcserver.app import create_app
def get_app() -> Flask:
"""The main module for the application. It contains the application factory and provides access to the database."""
import logging
import os
import sys
from logging.handlers import RotatingFileHandler
from threading import Thread
from time import strftime
from typing import Type
import connexion
import flask
from connexion import FlaskApp
from flask import Flask, got_request_exception, request, Response, send_from_directory
from flask_cors import CORS
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from open_alchemy import init_yaml
from mcserver.config import Config
db: SQLAlchemy = SQLAlchemy() # session_options={"autocommit": True}
migrate: Migrate = Migrate(directory=Config.MIGRATIONS_DIRECTORY)
# do this _BEFORE_ you add any APIs to your application
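# init_yaml reads the OpenAPI spec and generates the SQLAlchemy models from it,
# writing them to models_auto.py (which is why that file is excluded from coverage in .coveragerc)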
init_yaml(Config.API_SPEC_FILE_PATH, base=db.Model,
models_filename=os.path.join(Config.MC_SERVER_DIRECTORY, "models_auto.py"))
def apply_event_handlers(app: FlaskApp):
"""Applies event handlers to a given Flask application, such as logging after requests or teardown logic."""
@app.app.after_request
def after_request(response: Response) -> Response:
""" Logs metadata for every request. """
timestamp = strftime('[%Y-%m-%d %H:%M:%S]')
app.app.logger.info('%s %s %s %s %s %s', timestamp, request.remote_addr, request.method, request.scheme,
request.full_path, response.status)
return response
@app.route(Config.SERVER_URI_FAVICON)
def get_favicon():
"""Sends the favicon to browsers, which is used, e.g., in the tabs as a symbol for our application."""
mime_type: str = 'image/vnd.microsoft.icon'
return send_from_directory(Config.ASSETS_DIRECTORY, Config.FAVICON_FILE_NAME, mimetype=mime_type)
app.app.teardown_appcontext(shutdown_session)
def create_app(cfg: Type[Config] = Config) -> Flask:
@@ -26,7 +52,7 @@ def create_app(cfg: Type[Config] = Config) -> Flask:
# use local postgres database for migrations
if len(sys.argv) > 2 and sys.argv[2] == Config.FLASK_MIGRATE:
cfg.SQLALCHEMY_DATABASE_URI = Config.DATABASE_URL_LOCAL
app = init_app_common(cfg=cfg)
app: Flask = init_app_common(cfg=cfg)
from mcserver.app.services import bp as services_bp
app.register_blueprint(services_bp)
from mcserver.app.api import bp as api_bp
@@ -51,27 +77,12 @@ def full_init(app: Flask, is_csm: bool) -> None:
def init_app_common(cfg: Type[Config] = Config, is_csm: bool = False) -> Flask:
""" Initializes common Flask parts, e.g. CORS, configuration, database, migrations and custom corpora."""
app = Flask(__name__)
@app.after_request
def after_request(response: Response) -> Response:
""" Logs metadata for every request. """
timestamp = strftime('[%Y-%m-%d %H:%M:%S]')
app.logger.info('%s %s %s %s %s %s', timestamp, request.remote_addr, request.method, request.scheme,
request.full_path, response.status)
return response
@app.route(Config.SERVER_URI_FAVICON)
def get_favicon():
"""Sends the favicon to browsers, which is used, e.g., in the tabs as a symbol for our application."""
mime_type: str = 'image/vnd.microsoft.icon'
return send_from_directory(Config.ASSETS_DIRECTORY, Config.FAVICON_FILE_NAME, mimetype=mime_type)
@app.teardown_appcontext
def shutdown_session(exception=None):
""" Shuts down the session when the application exits. (maybe also after every request ???) """
db.session.remove()
connexion_app: FlaskApp = connexion.FlaskApp(
__name__, port=(cfg.CORPUS_STORAGE_MANAGER_PORT if is_csm else cfg.HOST_PORT),
specification_dir=Config.MC_SERVER_DIRECTORY)
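# add_api builds the REST routes from the OpenAPI spec; connexion additionally serves
# the interactive API documentation (Swagger UI) under the spec's base path at .../ui/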
connexion_app.add_api(Config.API_SPEC_FILE_PATH, arguments={'title': 'Machina Callida Backend REST API'})
apply_event_handlers(connexion_app)
app: Flask = connexion_app.app
# allow CORS requests for all API routes
CORS(app) # , resources=r"/*"
app.config.from_object(cfg)
@@ -120,6 +131,12 @@ def start_updater(app: Flask) -> Thread:
return t
def shutdown_session(exception=None):
""" Shuts down the session when the application exits. (maybe also after every request ???) """
db.session.remove()
# import the models so we can access them from other parts of the app using imports from "app.models";
# this has to be at the bottom of the file
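# (deferring these imports until after db has been created avoids a circular import)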
from mcserver.app import models
from mcserver.app import api
"""The API blueprint. Register it on the main application to enable the REST API for text retrieval."""
from flask import Blueprint
from flask_restful import Api
from mcserver import Config
bp = Blueprint("api", __name__)
api = Api(bp)
from mcserver.app.api.corpusAPI import CorpusAPI
from mcserver.app.api.corpusListAPI import CorpusListAPI
from mcserver.app.api.exerciseAPI import ExerciseAPI
from . import corpusAPI, corpusListAPI, exerciseAPI
from mcserver.app.api.exerciseListAPI import ExerciseListAPI
from mcserver.app.api.fileAPI import FileAPI
from mcserver.app.api.frequencyAPI import FrequencyAPI
@@ -22,9 +19,6 @@ from mcserver.app.api.validReffAPI import ValidReffAPI
from mcserver.app.api.vectorNetworkAPI import VectorNetworkAPI
from mcserver.app.api.vocabularyAPI import VocabularyAPI
api.add_resource(CorpusListAPI, Config.SERVER_URI_CORPORA, endpoint="corpora")
api.add_resource(CorpusAPI, Config.SERVER_URI_CORPORA + "/<int:cid>", endpoint="corpus")
api.add_resource(ExerciseAPI, Config.SERVER_URI_EXERCISE, endpoint="exercise")
api.add_resource(ExerciseListAPI, Config.SERVER_URI_EXERCISE_LIST, endpoint="exerciseList")
api.add_resource(FileAPI, Config.SERVER_URI_FILE, endpoint="file")
api.add_resource(FrequencyAPI, Config.SERVER_URI_FREQUENCY, endpoint="frequency")
"""The corpus API. Add it to your REST API to provide users with metadata about specific texts."""
from flask_restful import Resource, abort, marshal
from flask_restful.reqparse import RequestParser
from typing import Union
import connexion
from connexion.lifecycle import ConnexionResponse
from flask import Response
from mcserver import Config
from mcserver.app import db
from mcserver.app.models import Corpus, corpus_fields
from mcserver.app.services import NetworkService
from mcserver.models_auto import Corpus
def delete(cid: int) -> Union[Response, ConnexionResponse]:
"""The DELETE method for the corpus REST API. It deletes metadata for a specific text."""
corpus: Corpus = db.session.query(Corpus).filter_by(cid=cid).first()
if corpus is None:
return connexion.problem(404, Config.ERROR_TITLE_NOT_FOUND, Config.ERROR_MESSAGE_CORPUS_NOT_FOUND)
db.session.delete(corpus)
db.session.commit()
return NetworkService.make_json_response(True)
def get(cid: int) -> Union[Response, ConnexionResponse]:
"""The GET method for the corpus REST API. It provides metadata for a specific text."""
corpus: Corpus = db.session.query(Corpus).filter_by(cid=cid).first()
if corpus is None:
return connexion.problem(404, Config.ERROR_TITLE_NOT_FOUND, Config.ERROR_MESSAGE_CORPUS_NOT_FOUND)
return NetworkService.make_json_response(corpus.to_dict())
class CorpusAPI(Resource):
"""The corpus API resource. It enables some of the CRUD operations for metadata about specific texts."""
def __init__(self):
"""Initialize possible arguments for calls to the corpus REST API."""
self.reqparse: RequestParser = NetworkService.base_request_parser.copy()
self.reqparse.add_argument("title", type=str, required=False, help="No title provided")
self.reqparse.add_argument("author", type=str, required=False, help="No author provided")
self.reqparse.add_argument("source_urn", type=str, required=False, help="No source URN provided")
super(CorpusAPI, self).__init__()
def get(self, cid):
"""The GET method for the corpus REST API. It provides metadata for a specific text."""
corpus: Corpus = Corpus.query.filter_by(cid=cid).first()
if corpus is None:
abort(404)
return {"corpus": marshal(corpus, corpus_fields)}
def put(self, cid):
"""The PUT method for the corpus REST API. It provides updates metadata for a specific text."""
corpus: Corpus = Corpus.query.filter_by(cid=cid).first()
if corpus is None:
abort(404)
args = self.reqparse.parse_args()
for k, v in args.items():
if v is not None:
setattr(corpus, k, v)
db.session.commit()
return {"corpus": marshal(corpus, corpus_fields)}
def delete(self, cid):
"""The DELETE method for the corpus REST API. It deletes metadata for a specific text."""
corpus: Corpus = Corpus.query.filter_by(cid=cid).first()
if corpus is None:
abort(404)
db.session.delete(corpus)
db.session.commit()
return {"result": True}
def patch(cid: int, **kwargs) -> Union[Response, ConnexionResponse]:
"""The PUT method for the corpus REST API. It provides updates metadata for a specific text."""
corpus: Corpus = db.session.query(Corpus).filter_by(cid=cid).first()
if corpus is None:
return connexion.problem(404, Config.ERROR_TITLE_NOT_FOUND, Config.ERROR_MESSAGE_CORPUS_NOT_FOUND)
for k, v in kwargs.items():
if v is not None:
setattr(corpus, k, v)
db.session.commit()
return NetworkService.make_json_response(corpus.to_dict())
"""The corpus list API. Add it to your REST API to provide users with a list of metadata for available texts."""
from datetime import datetime
from flask import jsonify
from flask_restful import Resource, marshal
from flask_restful.reqparse import RequestParser
from connexion.lifecycle import ConnexionResponse
from flask import Response
from sqlalchemy.exc import OperationalError, InvalidRequestError
from typing import List, Union
from mcserver.app import db
from mcserver.app.models import UpdateInfo, ResourceType, Corpus, corpus_fields
from mcserver.app.services import CorpusService, NetworkService
class CorpusListAPI(Resource):
"""The corpus list API resource. It enables some of the CRUD operations for a list of metadata about all texts."""
from mcserver.app.models import ResourceType
from mcserver.app.services import NetworkService
from mcserver.models_auto import Corpus, UpdateInfo
def __init__(self):
"""Initialize possible arguments for calls to the corpus list REST API."""
self.reqparse: RequestParser = NetworkService.base_request_parser.copy()
self.reqparse.add_argument("last_update_time", type=int, required=True,
help="No milliseconds time for last update provided")
super(CorpusListAPI, self).__init__()
def get(self):
"""The GET method for the corpus list REST API. It provides metadata for all available texts."""
args = self.reqparse.parse_args()
last_update: int = args["last_update_time"]
last_update_time: datetime = datetime.fromtimestamp(last_update / 1000.0)
ui_cts: UpdateInfo
try:
ui_cts = UpdateInfo.query.filter_by(resource_type=ResourceType.cts_data.name).first()
except (InvalidRequestError, OperationalError):
db.session.rollback()
return None
if ui_cts.last_modified_time >= last_update_time:
CorpusService.existing_corpora = Corpus.query.all()
return jsonify({"corpora": [marshal(corpus, corpus_fields) for corpus in CorpusService.existing_corpora]})
return None
def get(last_update_time: int) -> Union[Response, ConnexionResponse]:
"""The GET method for the corpus list REST API. It provides metadata for all available texts."""
ui_cts: UpdateInfo
try:
ui_cts = db.session.query(UpdateInfo).filter_by(resource_type=ResourceType.cts_data.name).first()
except (InvalidRequestError, OperationalError):
db.session.rollback()
return NetworkService.make_json_response(None)
if ui_cts.last_modified_time >= last_update_time / 1000:
corpora: List[Corpus] = db.session.query(Corpus).all()
return NetworkService.make_json_response([x.to_dict() for x in corpora])
return NetworkService.make_json_response(None)
import uuid
from collections import OrderedDict
from datetime import datetime
import connexion
import rapidjson as json
from typing import List, Dict
from typing import List, Dict, Union
import requests
from flask_restful import Resource, marshal, abort
from flask_restful.reqparse import RequestParser
from connexion.lifecycle import ConnexionResponse
from flask import Response
from mcserver.app import db
from mcserver.app.models import ExerciseType, Solution, ExerciseData, Exercise, exercise_fields, AnnisResponse, \
Phenomenon, TextComplexity, TextComplexityMeasure, UpdateInfo, ResourceType
from mcserver.app.models import ExerciseType, Solution, ExerciseData, AnnisResponse, Phenomenon, TextComplexity, \
TextComplexityMeasure, ResourceType, ExerciseMC
from mcserver.app.services import AnnotationService, CorpusService, NetworkService, TextComplexityService
from mcserver.config import Config
class ExerciseAPI(Resource):
"""The exercise API resource. It creates exercises for a given text."""
def __init__(self):
"""Initialize possible arguments for calls to the exercise REST API."""
# TODO: switch to other request parser, e.g. Marshmallow, because the one used by Flask-RESTful does not allow parsing arguments from different locations, e.g. one argument from 'location=args' and another argument from 'location=form'
self.reqparse: RequestParser = NetworkService.base_request_parser.copy()
self.reqparse.add_argument("urn", type=str, required=False, location="form", help="No URN provided")
self.reqparse.add_argument("type", type=str, required=False, location="form", help="No exercise type provided")
self.reqparse.add_argument("search_values", type=str, required=False, location="form",
help="No search value provided")
self.reqparse.add_argument("type_translation", type=str, location="form", required=False,
help="No exercise type translation provided")
self.reqparse.add_argument("work_author", type=str, location="form", required=False,
help="No work_author provided", default="")
self.reqparse.add_argument("work_title", type=str, required=False, location="form",
help="No work title provided", default="")
self.reqparse.add_argument("instructions", type=str, required=False, location="form", default="")
self.reqparse.add_argument("general_feedback", type=str, required=False, location="form", default=" ")
self.reqparse.add_argument("correct_feedback", type=str, required=False, location="form", default=" ")
self.reqparse.add_argument("partially_correct_feedback", type=str, required=False, location="form", default=" ")
self.reqparse.add_argument("incorrect_feedback", type=str, required=False, location="form", default=" ")
self.reqparse.add_argument("eid", type=str, required=False, location="args", help="No exercise ID provided")
super(ExerciseAPI, self).__init__()
def get(self):
args: dict = self.reqparse.parse_args()
eid: str = args["eid"]
exercise: Exercise = Exercise.query.filter_by(eid=eid).first()
if exercise is None:
abort(404)
ar: AnnisResponse = CorpusService.get_corpus(cts_urn=exercise.urn, is_csm=False)
if not ar.nodes:
abort(404)
exercise.last_access_time = datetime.utcnow()
db.session.commit()
exercise_type: ExerciseType = ExerciseType(exercise.exercise_type)
ar.solutions = json.loads(exercise.solutions)
ar.uri = exercise.uri
ar.exercise_id = exercise.eid
ar.exercise_type = exercise_type.value
return NetworkService.make_json_response(ar.__dict__)
def post(self):
# get request arguments
args: dict = self.reqparse.parse_args()
urn: str = args["urn"]
exercise_type: ExerciseType = ExerciseType(args["type"])
search_values_json: str = args["search_values"]
search_values_list: List[str] = json.loads(search_values_json)
aqls: List[str] = AnnotationService.map_search_values_to_aql(search_values_list=search_values_list,
exercise_type=exercise_type)
search_phenomena: List[Phenomenon] = [Phenomenon[x.split("=")[0]] for x in search_values_list]
# if there is custom text instead of a URN, immediately annotate it
conll_string_or_urn: str = urn if CorpusService.is_urn(urn) else AnnotationService.get_udpipe(
CorpusService.get_raw_text(urn, False))
# construct graph from CONLL data
response: dict = get_graph_data(title=urn, conll_string_or_urn=conll_string_or_urn, aqls=aqls,
exercise_type=exercise_type, search_phenomena=search_phenomena)
solutions_dict_list: List[Dict] = response["solutions"]
solutions: List[Solution] = [Solution(json_dict=x) for x in solutions_dict_list]
ar: AnnisResponse = make_new_exercise(graph_data_raw=response["graph_data_raw"], solutions=solutions, args=args,
conll=response["conll"], search_values=args["search_values"], urn=urn)
return NetworkService.make_json_response(ar.__dict__)
from mcserver.models_auto import Exercise, TExercise, UpdateInfo
def adjust_solutions(exercise_data: ExerciseData, exercise_type: str, solutions: List[Solution]) -> List[Solution]:
@@ -93,6 +25,23 @@ def adjust_solutions(exercise_data: ExerciseData, exercise_type: str, solutions:
return solutions
def get(eid: str) -> Union[Response, ConnexionResponse]:
exercise: TExercise = db.session.query(Exercise).filter_by(eid=eid).first()
if exercise is None:
return connexion.problem(404, Config.ERROR_TITLE_NOT_FOUND, Config.ERROR_MESSAGE_EXERCISE_NOT_FOUND)
ar: AnnisResponse = CorpusService.get_corpus(cts_urn=exercise.urn, is_csm=False)
if not ar.nodes:
return connexion.problem(404, Config.ERROR_TITLE_NOT_FOUND, Config.ERROR_MESSAGE_CORPUS_NOT_FOUND)
exercise.last_access_time = datetime.utcnow().timestamp()
db.session.commit()
exercise_type: ExerciseType = ExerciseType(exercise.exercise_type)
ar.solutions = json.loads(exercise.solutions)
ar.uri = NetworkService.get_exercise_uri(exercise)
ar.exercise_id = exercise.eid
ar.exercise_type = exercise_type.value
return NetworkService.make_json_response(ar.__dict__)
def get_graph_data(title: str, conll_string_or_urn: str, aqls: List[str], exercise_type: ExerciseType,
search_phenomena: List[Phenomenon]):
"""Sends annotated text data or a URN to the Corpus Storage Manager in order to get a graph."""
@@ -104,38 +53,35 @@ def get_graph_data(title: str, conll_string_or_urn: str, aqls: List[str], exerci
try:
return json.loads(response.text)
except ValueError:
abort(500)
raise
def make_new_exercise(solutions: List[Solution], args: dict, search_values: str, graph_data_raw: dict,
conll: str, urn: str) -> AnnisResponse:
def make_new_exercise(conll: str, correct_feedback: str, exercise_type: str, general_feedback: str,
graph_data_raw: dict, incorrect_feedback: str, instructions: str, partially_correct_feedback: str,
search_values: str, solutions: List[Solution], type_translation: str, urn: str,
work_author: str, work_title: str) -> AnnisResponse:
""" Creates a new exercise and makes it JSON serializable. """
# generate a GUID so we can offer the exercise XML as a file download
xml_guid = str(uuid.uuid4())
# assemble the mapped exercise data
ed: ExerciseData = AnnotationService.map_graph_data_to_exercise(graph_data_raw=graph_data_raw, solutions=solutions,
xml_guid=xml_guid)
exercise_type = args["type"]
# for markWords exercises, add the maximum number of correct solutions to the description
instructions: str = args["instructions"] + (
f"({len(solutions)})" if exercise_type == ExerciseType.markWords.value else "")
instructions += (f"({len(solutions)})" if exercise_type == ExerciseType.markWords.value else "")
# map the exercise data to our database data model
new_exercise: Exercise = map_exercise_data_to_database(solutions=solutions, exercise_data=ed,
exercise_type=exercise_type, instructions=instructions,
xml_guid=xml_guid, correct_feedback=args["correct_feedback"],
partially_correct_feedback=args[
"partially_correct_feedback"],
incorrect_feedback=args["incorrect_feedback"],
general_feedback=args["general_feedback"],
exercise_type_translation=args.get("type_translation", ""),
conll=conll, work_author=args["work_author"],
work_title=args["work_title"], search_values=search_values,
urn=urn)
# marshal the whole object so we can get the right URI for download purposes
new_exercise_marshal: OrderedDict = marshal(new_exercise, exercise_fields)
xml_guid=xml_guid, correct_feedback=correct_feedback,
partially_correct_feedback=partially_correct_feedback,
incorrect_feedback=incorrect_feedback,
general_feedback=general_feedback,
exercise_type_translation=type_translation, conll=conll,
work_author=work_author, work_title=work_title,
search_values=search_values, urn=urn)
# create a response
return AnnisResponse(solutions=json.loads(new_exercise.solutions), uri=new_exercise_marshal["uri"],
exercise_id=xml_guid)
return AnnisResponse(
solutions=json.loads(new_exercise.solutions), uri=f"{Config.SERVER_URI_FILE}/{new_exercise.eid}",
exercise_id=xml_guid)
def map_exercise_data_to_database(exercise_data: ExerciseData, exercise_type: str, instructions: str, xml_guid: str,
@@ -146,19 +92,53 @@ def map_exercise_data_to_database(exercise_data: ExerciseData, exercise_type: st
# sort the nodes according to the ordering links
AnnotationService.sort_nodes(graph_data=exercise_data.graph)
# add content to solutions
solutions = adjust_solutions(exercise_data=exercise_data, solutions=solutions, exercise_type=exercise_type)
solutions: List[Solution] = adjust_solutions(exercise_data=exercise_data, solutions=solutions,
exercise_type=exercise_type)
quiz_solutions: str = json.dumps([x.serialize() for x in solutions])
tc: TextComplexity = TextComplexityService.text_complexity(TextComplexityMeasure.all.name, urn, False,
exercise_data.graph)
new_exercise: Exercise = Exercise(conll=conll, correct_feedback=correct_feedback, eid=xml_guid,
exercise_type=exercise_type, exercise_type_translation=exercise_type_translation,
general_feedback=general_feedback, incorrect_feedback=incorrect_feedback,
instructions=instructions, partially_correct_feedback=partially_correct_feedback,
search_values=search_values, solutions=quiz_solutions, text_complexity=tc.all,
work_author=work_author, work_title=work_title, uri=exercise_data.uri, urn=urn)
new_exercise: Exercise = ExerciseMC.from_dict(
conll=conll, correct_feedback=correct_feedback, eid=xml_guid, exercise_type=exercise_type,
exercise_type_translation=exercise_type_translation, general_feedback=general_feedback,
incorrect_feedback=incorrect_feedback, instructions=instructions,
last_access_time=datetime.utcnow().timestamp(), partially_correct_feedback=partially_correct_feedback,
search_values=search_values, solutions=quiz_solutions, text_complexity=tc.all, work_author=work_author,
work_title=work_title, urn=urn)
# add the mapped exercise to the database
db.session.add(new_exercise)
ui_exercises: UpdateInfo = UpdateInfo.query.filter_by(resource_type=ResourceType.exercise_list.name).first()
ui_exercises.last_modified_time = datetime.utcnow()
ui_exercises: UpdateInfo = db.session.query(UpdateInfo).filter_by(
resource_type=ResourceType.exercise_list.name).first()
ui_exercises.last_modified_time = datetime.utcnow().timestamp()
db.session.commit()
return new_exercise
def post(exercise_data: dict) -> Union[Response, ConnexionResponse]:
exercise_type: ExerciseType = ExerciseType(exercise_data["type"])
search_values_list: List[str] = json.loads(exercise_data["search_values"])
aqls: List[str] = AnnotationService.map_search_values_to_aql(search_values_list=search_values_list,
exercise_type=exercise_type)
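# each search value has the form "phenomenon=value"; extract the phenomenon names for the graph request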
search_phenomena: List[Phenomenon] = [Phenomenon[x.split("=")[0]] for x in search_values_list]
urn: str = exercise_data.get("urn", "")
# if there is custom text instead of a URN, immediately annotate it
conll_string_or_urn: str = urn if CorpusService.is_urn(urn) else AnnotationService.get_udpipe(
CorpusService.get_raw_text(urn, False))
try:
# construct graph from CONLL data
response: dict = get_graph_data(title=urn, conll_string_or_urn=conll_string_or_urn, aqls=aqls,