diff --git a/html/hurrian/commonHurrian.php b/html/hurrian/commonHurrian.php
index 3c306fcd..ed36f097 100644
--- a/html/hurrian/commonHurrian.php
+++ b/html/hurrian/commonHurrian.php
@@ -1,7 +1,7 @@
query($sql);
+$data = array();
+while ($row = $result->fetch_assoc())
{
- $db = new SQLite3($dbFileName);
- $sql = <<query($sql);
- $data = array();
- while ($row = $result->fetchArray())
- {
- $fields = array($row['segmentation'],
- $row['translation_de'],
- $row['morph_tag'],
- $row['part_of_speech'],
- $row['det']);
- $data[$row['transcription']][] = implode(' @ ', $fields);
- }
- echo json_encode($data, JSON_UNESCAPED_UNICODE);
-}
-else
-{
- echo '{}';
+ $fields = array($row['segmentation'],
+ $row['translation_de'],
+ $row['morph_tag'],
+ $row['part_of_speech'],
+ $row['determinative']);
+ $data[$row['transcription']][] = implode(' @ ', $fields);
}
+echo json_encode($data, JSON_UNESCAPED_UNICODE);
?>
\ No newline at end of file
diff --git a/html/hurrian/getHurrianLexicon.php b/html/hurrian/getHurrianLexicon.php
index 4bbb1572..db909d48 100644
--- a/html/hurrian/getHurrianLexicon.php
+++ b/html/hurrian/getHurrianLexicon.php
@@ -1,25 +1,18 @@
query($sql);
+$data = array();
+while ($row = $result->fetch_assoc())
{
- $db = new SQLite3($dbFileName);
- $sql = <<query($sql);
- $data = array();
- while ($row = $result->fetchArray())
- {
- $key = $row['stem'].','.$row['part_of_speech'];
- $data[$key][] = $row['translation_de'];
- }
- echo json_encode($data, JSON_UNESCAPED_UNICODE);
-}
-else
-{
- echo '{}';
+ $key = $row['stem'].','.$row['part_of_speech'];
+ $data[$key][] = $row['translation_de'];
}
+echo json_encode($data, JSON_UNESCAPED_UNICODE);
?>
\ No newline at end of file
diff --git a/html/hurrian/updateHurrianDictionary.php b/html/hurrian/updateHurrianDictionary.php
index 7ecfc731..be9898e1 100644
--- a/html/hurrian/updateHurrianDictionary.php
+++ b/html/hurrian/updateHurrianDictionary.php
@@ -1,102 +1,75 @@
exec($sql);
-}
+//Datenbank öffnen
+$db = connect_to_db('hurrian_lexical_database');
//Lemma finden oder hinzufügen
$findLemma = <<query($findLemma);
-$row = $result->fetchArray();
+$row = $result->fetch_assoc();
if (!$row) {
$sql = <<exec($sql);
+ $db->query($sql);
$result = $db->query($findLemma);
- $row = $result->fetchArray();
+ $row = $result->fetch_assoc();
}
$lemma_id = $row['lemma_id'];
//Suffixkette finden oder hinzufügen
$findSuffixChain = <<query($findSuffixChain);
-$row = $result->fetchArray();
+$row = $result->fetch_assoc();
if (!$row) {
$sql = <<exec($sql);
+ $db->query($sql);
$result = $db->query($findSuffixChain);
- $row = $result->fetchArray();
+ $row = $result->fetch_assoc();
}
$suffix_chain_id = $row['suffix_chain_id'];
//Wortform finden oder hinzufügen
$findWordform = <<query($findWordform);
-$row = $result->fetchArray();
+$row = $result->fetch_assoc();
if (!$row) {
$sql = <<exec($sql);
+ $db->query($sql);
}
?>
\ No newline at end of file
diff --git a/html/mysqliconn.php b/html/mysqliconn.php
index f09db526..6990fcbf 100644
--- a/html/mysqliconn.php
+++ b/html/mysqliconn.php
@@ -5,9 +5,9 @@
const database = 'hpm';
const port = 3406;
-function connect_to_db(): mysqli
+function connect_to_db($database=database): mysqli
{
- $db = mysqli_connect(host, user, password, database, port);
+ $db = mysqli_connect(host, user, password, $database, port);
if ($db) {
return $db;
diff --git a/sql_inits/00_users_init.sql b/sql_inits/00_users_init.sql
index 7f304526..f3fd751a 100644
--- a/sql_inits/00_users_init.sql
+++ b/sql_inits/00_users_init.sql
@@ -1,5 +1,6 @@
-create user hpm@'%' identified by '1234';
-
-grant all on hpm.* to hpm@'%';
-
-flush privileges;
+create user hpm@'%' identified by '1234';
+
+grant all on hpm.* to hpm@'%';
+grant all on hurrian_lexical_database.* to hpm@'%';
+
+flush privileges;
diff --git a/sql_inits/create_hur_lex_db.sql b/sql_inits/create_hur_lex_db.sql
new file mode 100644
index 00000000..05524773
--- /dev/null
+++ b/sql_inits/create_hur_lex_db.sql
@@ -0,0 +1,63 @@
+create database if not exists hurrian_lexical_database;
+use hurrian_lexical_database;
+
+/*drop table if exists
+ wordform,
+ lemma,
+ suffix_chain;*/
+
+/* begin table creation */
+
+CREATE TABLE if not exists lemma (
+ lemma_id mediumint unsigned not null auto_increment,
+ stem text not null,
+ part_of_speech text not null,
+ translation_de text not null,
+ determinative text not null,
+ constraint pk_lemma primary key (lemma_id)
+);
+
+CREATE TABLE if not exists suffix_chain (
+ suffix_chain_id mediumint unsigned not null auto_increment,
+ suffixes text not null,
+ morph_tag text not null,
+ part_of_speech text not null,
+ constraint pk_suffix_chain primary key (suffix_chain_id)
+);
+
+CREATE TABLE if not exists wordform (
+ wordform_id mediumint unsigned not null auto_increment,
+ transcription text not null,
+ segmentation text not null,
+ lemma_id mediumint unsigned not null,
+ suffix_chain_id mediumint unsigned not null,
+ constraint fk_lemma_id foreign key (lemma_id)
+ references lemma (lemma_id),
+ constraint fk_suffix_chain_id foreign key (suffix_chain_id)
+ references suffix_chain (suffix_chain_id),
+ constraint pk_wordform primary key (wordform_id)
+);
+
+/* end table creation */
+
+/* begin data population */
+
+/* lemma data */
+insert into lemma (lemma_id, stem, part_of_speech, translation_de, determinative)
+values (null, 'nāli', 'noun', 'Rehbock', '');
+insert into lemma (lemma_id, stem, part_of_speech, translation_de, determinative)
+values (null, 'tāri', 'noun', 'Feuer', '');
+insert into lemma (lemma_id, stem, part_of_speech, translation_de, determinative)
+values (null, 'id', 'verb', 'schlagen', '');
+insert into lemma (lemma_id, stem, part_of_speech, translation_de, determinative)
+values (null, 'am', 'verb', 'brennen', '');
+
+/* suffix chain data */
+insert into suffix_chain (suffix_chain_id, suffixes, morph_tag, part_of_speech)
+values (null, '(n>)re-ž', 'RELAT.SG-ERG', 'noun');
+
+/* wordform data */
+insert into wordform (wordform_id, transcription, segmentation, lemma_id, suffix_chain_id)
+values (null, 'tārrež', 'tār(i)-(n>)re-ž', 2, 1);
+
+/* end data population */
\ No newline at end of file
diff --git a/sql_inits/query_hurrian_wordforms.sql b/sql_inits/query_hurrian_wordforms.sql
new file mode 100644
index 00000000..0f8b6164
--- /dev/null
+++ b/sql_inits/query_hurrian_wordforms.sql
@@ -0,0 +1,5 @@
+select lemma.stem, /*transcription,*/ segmentation, lemma.translation_de, suffix_chain.morph_tag
+ from wordform
+ inner join lemma on wordform.lemma_id=lemma.lemma_id
+ inner join suffix_chain on wordform.suffix_chain_id=suffix_chain.suffix_chain_id
+ order by lemma.stem, transcription;
diff --git a/ui/src/xmlEditor/hur/dictionary.ts b/ui/src/xmlEditor/hur/dictionary.ts
index a4198473..e5d64bdc 100644
--- a/ui/src/xmlEditor/hur/dictionary.ts
+++ b/ui/src/xmlEditor/hur/dictionary.ts
@@ -5,8 +5,10 @@ import { makeStandardAnalyses } from './standardAnalysis';
import { setGlosses, saveGloss } from './glossUpdater';
import { MorphologicalAnalysis, writeMorphAnalysisValue }
from '../../model/morphologicalAnalysis';
+import { getHurrianDictionaryUrl } from '../../urls';
import { convertDictionary, updateAndValidateDictionary } from './utility';
import { isValid, normalize } from './morphologicalAnalysisValidator';
+// NOTE(review): do NOT import sendMorphologicalAnalysisToTheServer here — this file still declares its own export of that name below, and the import would be a duplicate-identifier compile error. Remove the local declaration first if the shared helper is wanted.
 const dictionary: Map<string, Set<string>> = new Map();
@@ -80,25 +82,26 @@ export function sendMorphologicalAnalysisToTheServer(word: string, analysis: str
}
export function updateHurrianDictionary(node: XmlElementNode, number: number, value: string): void {
- if (!isValid(value)) {
- return;
- }
- value = normalize(value, false);
- if (number === 1) {
- delete node.attributes.firstAnalysisIsPlaceholder;
- }
- const transcription: string = node.attributes.trans || '';
- let possibilities: Set | undefined;
- if (dictionary.has(transcription)) {
- possibilities = dictionary.get(transcription);
- }
- else {
- possibilities = new Set();
- dictionary.set(transcription, possibilities);
- }
- if (possibilities === undefined) {
- throw new Error();
-
+ if (isValid(value)) {
+ value = normalize(value, false);
+ if (number === 1) {
+ delete node.attributes.firstAnalysisIsPlaceholder;
+ }
+ const transcription: string = node.attributes.trans || '';
+    let possibilities: Set<string> | undefined;
+ if (dictionary.has(transcription)) {
+ possibilities = dictionary.get(transcription);
+ }
+ else {
+ possibilities = new Set();
+ dictionary.set(transcription, possibilities);
+ }
+ if (possibilities === undefined) {
+ throw new Error();
+ }
+ possibilities.add(value);
+ sendMorphologicalAnalysisToTheServer(transcription, value);
+ saveGloss(number, value);
}
}
diff --git a/ui/src/xmlEditor/hur/sendToTheServer.ts b/ui/src/xmlEditor/hur/sendToTheServer.ts
new file mode 100644
index 00000000..20302dfb
--- /dev/null
+++ b/ui/src/xmlEditor/hur/sendToTheServer.ts
@@ -0,0 +1,9 @@
+import { updateHurrianDictionaryUrl } from '../../urls';
+
+export function sendMorphologicalAnalysisToTheServer(word: string, analysis: string) {
+ const formData = new FormData();
+ formData.append('word', word);
+ formData.append('analysis', analysis);
+
+  fetch(updateHurrianDictionaryUrl, { method: 'POST', body: formData }).catch((err: unknown) => console.error('updateHurrianDictionary request failed', err));
+}
\ No newline at end of file
diff --git a/ui/src/xmlEditor/hur/utility.ts b/ui/src/xmlEditor/hur/utility.ts
index b5419398..f3bc5d8e 100644
--- a/ui/src/xmlEditor/hur/utility.ts
+++ b/ui/src/xmlEditor/hur/utility.ts
@@ -1,4 +1,5 @@
import { isValid, normalize } from './morphologicalAnalysisValidator';
+import { sendMorphologicalAnalysisToTheServer } from './sendToTheServer';
 export function convertDictionary(dictionary: Map<string, Set<string>>): { [key: string]: string[] } {
const object: { [key: string]: string[] } = {};
@@ -29,7 +30,9 @@ export function updateAndValidateDictionary(dictionary: Map>
     const newSet: Set<string> = new Set();
for (const value of values) {
if (isValid(value)) {
-      newSet.add(normalize(value, true));
+      const normalized = normalize(value, true);
+      newSet.add(normalized);
+      sendMorphologicalAnalysisToTheServer(key, normalized);
}
}
if (newSet.size > 0) {
@@ -38,7 +41,9 @@ export function updateAndValidateDictionary(dictionary: Map>
} else {
for (const value of values) {
if (isValid(value)) {
- currSet.add(normalize(value, true));
+ const normalized = normalize(value, true);
+ currSet.add(normalized);
+ sendMorphologicalAnalysisToTheServer(key, normalized);
}
}
}