diff --git a/.gitignore b/.gitignore index fd45b12..ff69e6a 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ .gradle /local.properties /.idea/caches/build_file_checksums.ser +/.idea/caches /.idea/libraries /.idea/modules.xml /.idea/workspace.xml diff --git a/.idea/encodings.xml b/.idea/encodings.xml new file mode 100644 index 0000000..15a15b2 --- /dev/null +++ b/.idea/encodings.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/.idea/gradle.xml b/.idea/gradle.xml new file mode 100644 index 0000000..0afeaa6 --- /dev/null +++ b/.idea/gradle.xml @@ -0,0 +1,21 @@ + + + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 0000000..af0bbdd --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/runConfigurations.xml b/.idea/runConfigurations.xml new file mode 100644 index 0000000..7f68460 --- /dev/null +++ b/.idea/runConfigurations.xml @@ -0,0 +1,12 @@ + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 0000000..35eb1dd --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/app/build.gradle b/app/build.gradle index 778cd3d..9ca5296 100644 --- a/app/build.gradle +++ b/app/build.gradle @@ -22,9 +22,12 @@ dependencies { implementation fileTree(dir: 'libs', include: ['*.jar']) implementation 'com.android.support:appcompat-v7:28.0.0' implementation 'com.android.support.constraint:constraint-layout:1.1.3' + implementation 'com.google.code.gson:gson:2.8.5' testImplementation 'junit:junit:4.12' - testImplementation 'org.mockito:mockito-core:2.7.22' + testImplementation 'org.mockito:mockito-core:2.23.0' + testImplementation 'com.squareup.okhttp3:mockwebserver:3.12.0' androidTestImplementation 'com.android.support.test:runner:1.0.2' androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2' - androidTestImplementation 'org.mockito:mockito-android:2.7.22' + androidTestImplementation 'org.mockito:mockito-android:2.23.0' + androidTestImplementation 'com.squareup.okhttp3:mockwebserver:3.12.0' } diff --git a/app/src/main/java/com/opdup/btcrserviceclient/BTCRDIDResolver.java b/app/src/main/java/com/opdup/btcrserviceclient/BTCRDIDResolver.java deleted file mode 100644 index c5d7035..0000000 --- a/app/src/main/java/com/opdup/btcrserviceclient/BTCRDIDResolver.java +++ /dev/null @@ -1,76 +0,0 @@ -package com.opdup.btcrserviceclient; - -import org.json.JSONException; - -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URL; - -public class BTCRDIDResolver { - - private String btcrDid; - private String txRef; - - private URL root; - private URL endpoint; - private String PROTOCOL = "https"; - private String ADDRESS = "localhost"; - private String PORT = "8080"; - private int TX_REF_SUBSTRING = 9; - - //Constructor - public BTCRDIDResolver(String btcrDid) throws MalformedURLException { - this.root = new URL(this.PROTOCOL, this.ADDRESS, this.PORT); - this.btcrDid = btcrDid; - this.txRef = this.btcrDid.substring(TX_REF_SUBSTRING); - } - - public BTCRDIDResolver(String btcrDid, URL rootURl) throws MalformedURLException { - this.root = rootURl; - this.btcrDid = btcrDid; - this.txRef = this.btcrDid.substring(TX_REF_SUBSTRING); - } - - // Resolve BTCR DID - public String getBtcrDidResolve() throws IOException { - this.endpoint = new URL(this.root, "txref/" + this.txRef + "/resolve"); - //String url = PROTOCOL + 
ADDRESS + ":" + PORT + "/txref/" + this.txRef + "/resolve"; - return new ResolveBTCRDID(this.endpoint).resolve(); - } - - // Following a tip - public String getTip() throws IOException { - this.endpoint = new URL(this.root, "txref/" + this.txRef + "/tip"); - //String url = PROTOCOL + ADDRESS + ":" + PORT + "/txref/" + this.txRef + "/tip"; - return new Tip(this.endpoint).getTip(); - } - - // Decode TxRef - public String decode() throws IOException { - this.endpoint = new URL(this.root, "txref/" + this.txRef + "/decode"); - //String url = PROTOCOL + ADDRESS + ":" + PORT + "/txref/" + this.txRef + "/decode"; - return new Decode(this.endpoint).decode(); - } - - // Get TxId from TxRef - public String txIdFromTxref() throws IOException, JSONException { - this.endpoint = new URL(this.root, "txref/" + this.txRef + "/txid"); - //String url = PROTOCOL + ADDRESS + ":" + PORT + "/txref/" + this.txRef + "/txid"; - return new TxIdFromTxRef(this.endpoint).getTxIdFromTxRef(); - } - - // Get Decoded Tx from TxId - public String getDecodedTx() throws IOException, JSONException { - String txId = txIdFromTxref(); - this.endpoint = new URL(this.root, "tx/" + txId); - //String url = PROTOCOL + ADDRESS + ":" + PORT + "/tx" + txId; - return new DecodedTx(this.endpoint).getTxFromTxId(); - } - - //Txid to Utxos for the address in Txid - public String getUtxosForAddress(String address) throws IOException { - this.endpoint = new URL(this.root, "addr/" + address + "/spends"); - return new UtxosForAddress(this.endpoint).getUtxos(); - } - -} diff --git a/app/src/main/java/com/opdup/btcrserviceclient/DecodedTx.java b/app/src/main/java/com/opdup/btcrserviceclient/DecodedTx.java deleted file mode 100644 index d6d6779..0000000 --- a/app/src/main/java/com/opdup/btcrserviceclient/DecodedTx.java +++ /dev/null @@ -1,19 +0,0 @@ -package com.opdup.btcrserviceclient; - -import java.io.IOException; -import java.net.URL; - -public class DecodedTx { - - private URL url; - - public DecodedTx(URL url){ - this.url = url; - } - - public String getTxFromTxId() throws IOException{ - ServiceConnection serviceConnection = new ServiceConnection(this.url); - return serviceConnection.getJsonString(); - } - -} diff --git a/app/src/main/java/com/opdup/btcrserviceclient/ResolveBTCRDID.java b/app/src/main/java/com/opdup/btcrserviceclient/ResolveBTCRDID.java deleted file mode 100644 index e7d1941..0000000 --- a/app/src/main/java/com/opdup/btcrserviceclient/ResolveBTCRDID.java +++ /dev/null @@ -1,19 +0,0 @@ -package com.opdup.btcrserviceclient; - -import java.io.IOException; -import java.net.URL; - -public class ResolveBTCRDID { - - private URL url; - - public ResolveBTCRDID(URL url){ - this.url = url; - } - - public String resolve() throws IOException { - ServiceConnection serviceConnection = new ServiceConnection(this.url); - return serviceConnection.getJsonString(); - } - -} \ No newline at end of file diff --git a/app/src/main/java/com/opdup/btcrserviceclient/ServiceConnection.java b/app/src/main/java/com/opdup/btcrserviceclient/ServiceConnection.java deleted file mode 100644 index e603ff0..0000000 --- a/app/src/main/java/com/opdup/btcrserviceclient/ServiceConnection.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.opdup.btcrserviceclient; - -import org.json.JSONException; -import org.json.JSONObject; - -import java.io.IOException; -import java.io.InputStream; -import java.net.HttpURLConnection; -import java.net.URL; -import java.util.Scanner; - -public class ServiceConnection { - - private URL url; - private HttpURLConnection connection; - - 
public ServiceConnection(URL url) throws IOException{ - this.url = url; - } - - - public void connect() throws IOException { - this.connection = (HttpURLConnection) this.url.openConnection(); - this.connection.setDoOutput(true); - this.connection.setInstanceFollowRedirects(false); - this.connection.setRequestMethod("GET"); - this.connection.setRequestProperty("Content-Type", "application/json"); - this.connection.setRequestProperty("charset", "utf-8"); - this.connection.connect(); - } - - public String getJsonString() throws IOException { - String json = null; - connect(); - InputStream inputStream = connection.getInputStream(); - json = new Scanner(inputStream, "UTF-8").useDelimiter("\\Z").next(); - return json; - } - - public JSONObject getJsonObject() throws IOException, JSONException { - return new JSONObject(getJsonString()); - } - -} \ No newline at end of file diff --git a/app/src/main/java/com/opdup/btcrserviceclient/TxIdFromTxRef.java b/app/src/main/java/com/opdup/btcrserviceclient/TxIdFromTxRef.java deleted file mode 100644 index d60e107..0000000 --- a/app/src/main/java/com/opdup/btcrserviceclient/TxIdFromTxRef.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.opdup.btcrserviceclient; - -import org.json.JSONException; -import org.json.JSONObject; - -import java.io.IOException; -import java.net.URL; - -public class TxIdFromTxRef { - - private URL url; - - public TxIdFromTxRef(URL url){ - this.url = url; - } - - public String getTxIdFromTxRef() throws IOException, JSONException { - String jsonString = new ServiceConnection(this.url).getJsonString(); - JSONObject jsonObject = new JSONObject(jsonString); - String txid = null; - if (jsonObject != null){ - txid = jsonObject.getString("txid"); - } - return txid; - } - -} \ No newline at end of file diff --git a/app/src/main/java/com/opdup/btcrserviceclient/UtxosForAddress.java b/app/src/main/java/com/opdup/btcrserviceclient/UtxosForAddress.java deleted file mode 100644 index e065925..0000000 --- a/app/src/main/java/com/opdup/btcrserviceclient/UtxosForAddress.java +++ /dev/null @@ -1,19 +0,0 @@ -package com.opdup.btcrserviceclient; - -import java.io.IOException; -import java.net.URL; - -public class UtxosForAddress { - - private URL url; - - public UtxosForAddress(URL url) { - this.url = url; - } - - public String getUtxos() throws IOException { - ServiceConnection serviceConnection = new ServiceConnection(this.url); - return serviceConnection.getJsonString(); - } - -} diff --git a/app/src/test/java/com/opdup/btcwallet/BTCRDIDResolverTest.java b/app/src/test/java/com/opdup/btcwallet/BTCRDIDResolverTest.java deleted file mode 100644 index e25cf6f..0000000 --- a/app/src/test/java/com/opdup/btcwallet/BTCRDIDResolverTest.java +++ /dev/null @@ -1,64 +0,0 @@ -package com.opdup.btcwallet; - -import com.opdup.btcrserviceclient.BTCRDIDResolver; - -import java.net.MalformedURLException; - -import org.json.JSONException; -import org.junit.Test; - -import java.io.IOException; -import static org.junit.Assert.*; -//import static org.mockito.mock.*; - -public class BTCRDIDResolverTest { - - private String BtcrDidString = "did:btcr:x705-jzv2-qqaz-7vuz"; //https://github.com/w3c-ccg/did-hackathon-2018/blob/master/BTCR-DID-Tests.md - private String address = ""; - - private BTCRDIDResolver tester; - - { - try { - tester = new BTCRDIDResolver(this.BtcrDidString); - } catch (MalformedURLException e) { - e.printStackTrace(); - } - } - - @Test - public void getBtcrDidResolve() throws IOException { - String string = tester.getBtcrDidResolve(); - 
assertFalse(string.isEmpty()); - } - - @Test - public void getTip() throws IOException { - String string = tester.getTip(); - assertFalse(string.isEmpty()); - } - - @Test - public void decode() throws IOException { - String string = tester.decode(); - assertFalse(string.isEmpty()); - } - - @Test - public void txIdFromTxref() throws IOException, JSONException { - String string = tester.txIdFromTxref(); - assertFalse(string.isEmpty()); - } - - @Test - public void getDecodedTx() throws IOException, JSONException { - String string = tester.getDecodedTx(); - assertFalse(string.isEmpty()); - } - - @Test - public void getUtxos() throws IOException, JSONException { - String string = tester.getUtxosForAddress(this.address); - assertFalse(string.isEmpty()); - } -} \ No newline at end of file diff --git a/btcrserviceclient/.gitignore b/btcrserviceclient/.gitignore new file mode 100644 index 0000000..796b96d --- /dev/null +++ b/btcrserviceclient/.gitignore @@ -0,0 +1 @@ +/build diff --git a/btcrserviceclient/build.gradle b/btcrserviceclient/build.gradle new file mode 100644 index 0000000..c0bf2ea --- /dev/null +++ b/btcrserviceclient/build.gradle @@ -0,0 +1,34 @@ +apply plugin: 'com.android.library' + +android { + compileSdkVersion 28 + + defaultConfig { + minSdkVersion 19 + targetSdkVersion 28 + versionCode 1 + versionName "1.0" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + } + } + +} + +dependencies { + implementation fileTree(dir: 'libs', include: ['*.jar']) + + implementation 'androidx.appcompat:appcompat:1.0.2' + implementation 'com.fasterxml.jackson.core:jackson-core:2.9.8' + testImplementation 'junit:junit:4.12' + androidTestImplementation 'androidx.test:runner:1.2.0' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0' + implementation project(':jsonldjava') +} diff --git a/btcrserviceclient/proguard-rules.pro b/btcrserviceclient/proguard-rules.pro new file mode 100644 index 0000000..f1b4245 --- /dev/null +++ b/btcrserviceclient/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. 
+#-renamesourcefileattribute SourceFile diff --git a/btcrserviceclient/src/androidTest/java/com/opdup/btcrdidserviceclient/ExampleInstrumentedTest.java b/btcrserviceclient/src/androidTest/java/com/opdup/btcrdidserviceclient/ExampleInstrumentedTest.java new file mode 100644 index 0000000..ba4bdba --- /dev/null +++ b/btcrserviceclient/src/androidTest/java/com/opdup/btcrdidserviceclient/ExampleInstrumentedTest.java @@ -0,0 +1,26 @@ +package com.opdup.btcrdidserviceclient; + +import android.content.Context; +import androidx.test.InstrumentationRegistry; +import androidx.test.runner.AndroidJUnit4; + +import org.junit.Test; +import org.junit.runner.RunWith; + +import static org.junit.Assert.*; + +/** + * Instrumented test, which will execute on an Android device. + * + * @see Testing documentation + */ +@RunWith(AndroidJUnit4.class) +public class ExampleInstrumentedTest { + @Test + public void useAppContext() { + // Context of the app under test. + Context appContext = InstrumentationRegistry.getTargetContext(); + + assertEquals("com.opdup.btcrdidserviceclient.test", appContext.getPackageName()); + } +} diff --git a/btcrserviceclient/src/main/AndroidManifest.xml b/btcrserviceclient/src/main/AndroidManifest.xml new file mode 100644 index 0000000..e041a93 --- /dev/null +++ b/btcrserviceclient/src/main/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + + + diff --git a/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/BTCRDIDResolver.java b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/BTCRDIDResolver.java new file mode 100644 index 0000000..3e0cfa5 --- /dev/null +++ b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/BTCRDIDResolver.java @@ -0,0 +1,151 @@ +package com.opdup.btcrserviceclient; + +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; + +import java.net.MalformedURLException; +import java.net.URL; + +public class BTCRDIDResolver { + + private String txRef; + + private URL root; + private URL endpoint; + private int TX_REF_SUBSTRING = 9; + public JSONArray resolveResult; + public JSONObject decodeResult; + public String pubKeyResult; + + public BTCRDIDResolver(String btcrDid, URL rootURL) { + this.root = rootURL; + this.txRef = "txtest1:" + btcrDid.substring(TX_REF_SUBSTRING); + } + + //Resolve + public JSONArray resolve() { + try { + this.endpoint = new URL(this.root, "txref/" + this.txRef + "/resolve"); + } catch (MalformedURLException e) { + System.err.print("MalformedURLException: " + e.getMessage()); + } + return new Resolve(this.endpoint).resolve(); + } + + //Get TxID + public String getTxId() { + try { + this.endpoint = new URL(this.root, "txref/" + this.txRef + "/txid"); + } catch (MalformedURLException e) { + System.err.print("MalformedURLException: " + e.getMessage()); + } + return new TxDetails(this.endpoint).getTxId(); + } + + //get UTXO index + public int getUtxoIndex() { + try { + this.endpoint = new URL(this.root, "txref/" + this.txRef + "/txid"); + } catch (MalformedURLException e) { + System.err.print("MalformedURLException: " + e.getMessage()); + } + return new TxDetails(this.endpoint).getUtxoIndex(); + } + + // Following a tip + public String getTip() { + JSONArray allTxs = resolve(); + String txid = getTxId(); + if (allTxs == null) { + return "null"; + } + if (new Tip(allTxs).followTip(txid)) { + return "Unspent"; + } else { + return "Spent"; + } + } + + // Decode TxRef + public JSONObject decode() { + try { + this.endpoint = new URL(this.root, "txref/" + this.txRef + "/decode"); + } catch 
(MalformedURLException e) { + System.err.print("MalformedURLException: " + e.getMessage()); + } + return new Decode(this.endpoint).decode(); + } + + // Get Decoded Tx from TxId + public JSONObject getDecodedTx() { + try { + this.endpoint = new URL(this.root, "tx/" + getTxId()); + } catch (MalformedURLException e) { + System.err.print("MalformedURLException: " + e.getMessage()); + } + return new DecodedTx(this.endpoint).getTxFromTxId(); + } + + //Txid to Utxos for the address in Txid + public String getUtxosForAddress(String address) { + try { + this.endpoint = new URL(this.root, "addr/" + address + "/spends"); + } catch (MalformedURLException e) { + System.err.print("MalformedURLException: " + e.getMessage()); + } + return new UtxosForAddress(this.endpoint).getUtxos(); + } + + //Return public key + public String getPublicKey() { + + String txid = getTxId(); + JSONArray allTxs = resolve(); + String[] values = null; + + String tip = getTip(); + + if (tip.equals("null")) { + return null; + } + try { + for (int i = 0; i < allTxs.length(); i++) { + JSONObject tx = allTxs.getJSONObject(i).getJSONObject("Transaction"); + String receivedTxId = tx.getString("txid"); + if (receivedTxId.equals(txid)) { + + int utxoIndex = getUtxoIndex(); + JSONObject input = tx.getJSONArray("vin").getJSONObject(utxoIndex); + JSONObject scriptSig = input.getJSONObject("scriptSig"); + + if (scriptSig == null) { + return null; + } + + String asm = scriptSig.getString("asm"); + + if (asm == null) { + return null; + } + + values = asm.split("\\s+"); + } + } + } catch (JSONException e) { + System.err.print("JSONException: " + e.getMessage()); + } + return values[1] /*+ " " + tip*/; + } + + + //Return DDO + public String getDDO() { + this.resolveResult = resolve(); + this.decodeResult = decode(); + this.pubKeyResult = getPublicKey(); + DDO ddo = new DDO(this.txRef, this.pubKeyResult, this.decodeResult, this.resolveResult); + return ddo.getDDO(); + } + +} diff --git a/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/DDO.java b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/DDO.java new file mode 100644 index 0000000..bd15115 --- /dev/null +++ b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/DDO.java @@ -0,0 +1,134 @@ +package com.opdup.btcrserviceclient; + +import com.github.jsonldjava.utils.JsonNormalizer; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; + +public class DDO { + + private String txRef; + private String pubKey; + + private JSONObject decode; + private JSONArray resolve; + + public DDO (String txRef, String pubKey, JSONObject decode, JSONArray resolve) { + this.txRef = txRef; + this.pubKey = pubKey; + this.decode = decode; + this.resolve = resolve; + } + + private JSONObject getSatoshiAuditTrail() { + + JSONObject satoshi = new JSONObject(); + + try { + + String chain = decode.getString("Hrp"); + if (chain.equals("txtest")) { + chain = "testnet"; + } + + String blockIndex = decode.getString("Position"); + + String blockHash; + String outputIndex; + String blocktime; + String time; + int burnFee; + + JSONObject tx = resolve.getJSONObject(0); + + blockHash = tx.getString("hash"); //######################## Error: no value for blockhash + time = tx.getString("time"); + blocktime = tx.getString("blocktime"); + + JSONArray vout = tx.getJSONArray("vout"); + JSONArray vin = tx.getJSONArray("vin"); + int voutValue = vout.getInt(0); + JSONArray prevOut = vin.getJSONArray(3); + int vinValue = prevOut.getInt(1); + burnFee = voutValue - vinValue; 
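+                // Assumes the resolve response exposes "hash", "time", "blocktime", "vout" and "vin" on each transaction;
+                // the burn fee above is read as the integer at vout[0] minus the integer at vin[3][1].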
+ outputIndex = vout.getString(1); + + satoshi.put("chain", chain); + satoshi.put("blockhash", blockHash); + satoshi.put("blockindex", blockIndex); + satoshi.put("outputindex", outputIndex); + satoshi.put("blocktime", blocktime); + satoshi.put("time", time); + satoshi.put("timereceived", time); + satoshi.put("burn-fee", burnFee); + + } catch (JSONException e) { + System.err.print("JSONException: " + e.getMessage()); + } + + return satoshi; + } + + private JSONObject setDDO() { + JSONObject object = null; + try { + object = new JSONObject(); + object.put("@context", "https://w3id.org/btcr/v1"); + object.put("id", "btcr:did:" + txRef); + + JSONObject pk = new JSONObject(); + pk.put("id", "btcr:did:" + txRef + "#Key01"); // #Key + pk.put("owner", "btcr:did:" + txRef); + pk.put("type", "EdDsaSAPublicKeySecp256k1"); + pk.put("publicKeyHex", pubKey); + + object.put("publicKey", pk); + + JSONObject auth = new JSONObject(); + auth.put("type", "EcDsaSAPublicKeySecp256k1Authentication"); + auth.put("publicKey", "#Key01"); // #Key + + object.put("authentication", auth); + + JSONObject service = new JSONObject(); + service.put("type", "BTCREndpoint"); + service.put("serviceEndpoint", "https://raw.githubusercontent.com/kimdhamilton/did/master/ddo.jsonld"); + + object.put("service", service); + + JSONObject satoshi = getSatoshiAuditTrail(); + + object.put("SatoshiAuditTrail", satoshi); + + } catch (JSONException e) { + System.err.print("JSONException: " + e.getMessage()); + } + + return object; + } + + public String getDDO() { + + String ddo = ""; + + Object document = setDDO(); + + new JsonNormalizer(document, new JsonNormalizer.OnNormalizedCompleted() { + @Override + public void OnNormalizedComplete(Object object) { + String normalized = (String) object; + } + }).execute(); + + /*try { + ddo = JsonUtils.toPrettyString(document); + } catch (IOException e) { + System.err.println("IOException: " + e.getMessage()); + }*/ + + ddo = document.toString(); + + return ddo; + } + +} diff --git a/app/src/main/java/com/opdup/btcrserviceclient/Decode.java b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/Decode.java similarity index 55% rename from app/src/main/java/com/opdup/btcrserviceclient/Decode.java rename to btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/Decode.java index 8347b08..6ab2c90 100644 --- a/app/src/main/java/com/opdup/btcrserviceclient/Decode.java +++ b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/Decode.java @@ -1,6 +1,7 @@ package com.opdup.btcrserviceclient; -import java.io.IOException; +import org.json.JSONObject; + import java.net.URL; public class Decode { @@ -11,8 +12,8 @@ public Decode(URL url){ this.url = url; } - public String decode() throws IOException{ - return new ServiceConnection(this.url).getJsonString(); + public JSONObject decode() { + return new ServiceConnection(this.url).getJsonObject(); } } \ No newline at end of file diff --git a/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/DecodedTx.java b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/DecodedTx.java new file mode 100644 index 0000000..111570d --- /dev/null +++ b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/DecodedTx.java @@ -0,0 +1,19 @@ +package com.opdup.btcrserviceclient; + +import org.json.JSONObject; + +import java.net.URL; + +public class DecodedTx { + + private URL url; + + public DecodedTx(URL url) { + this.url = url; + } + + public JSONObject getTxFromTxId() { + return new ServiceConnection(this.url).getJsonObject(); + } + +} diff 
--git a/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/Resolve.java b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/Resolve.java new file mode 100644 index 0000000..19cef80 --- /dev/null +++ b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/Resolve.java @@ -0,0 +1,19 @@ +package com.opdup.btcrserviceclient; + +import org.json.JSONArray; + +import java.net.URL; + +public class Resolve { + + private URL url; + + public Resolve(URL url){ + this.url = url; + } + + public JSONArray resolve() { + return new ServiceConnection(this.url).getJsonArray(); + } + +} \ No newline at end of file diff --git a/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/ServiceConnection.java b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/ServiceConnection.java new file mode 100644 index 0000000..4dc995c --- /dev/null +++ b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/ServiceConnection.java @@ -0,0 +1,99 @@ +package com.opdup.btcrserviceclient; + +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.HttpURLConnection; +import java.net.SocketException; +import java.net.URL; + +public class ServiceConnection { + + private URL url; + private HttpURLConnection connection; + + public ServiceConnection(URL url) { + this.url = url; + } + + private void connect() { + try { + this.connection = (HttpURLConnection) this.url.openConnection(); + this.connection.setDoOutput(false); + this.connection.setDoInput(true); + this.connection.setUseCaches(false); + this.connection.setRequestMethod("GET"); + this.connection.setRequestProperty("Content-Type", "application/json"); + this.connection.setRequestProperty("charset", "utf-8"); + this.connection.connect(); + } catch (SocketException e) { + System.err.print("SocketException: " + e.getMessage()); + } catch (IOException e) { + System.err.print("IOException: " + e.getMessage()); + } + } + + //New getJson method + public String getJsonString() { + StringBuilder response = new StringBuilder(); + String responseJSON; + try { + + connect(); + + int status = connection.getResponseCode(); + + if (status != 200) { + throw new IOException("Error: " + status); + } else { + BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream())); + String inputLine; + while ((inputLine = in.readLine()) != null) { + response.append(inputLine); + } + in.close(); + } + + } catch (IOException e) { + System.err.print("IOException: " + e.getMessage()); + } finally { + if (connection != null) { + connection.disconnect(); + } + responseJSON = response.toString(); + } + + return responseJSON; + } + + public JSONObject getJsonObject() { + + JSONObject response = null; + + try { + response = new JSONObject(getJsonString()); + } catch (JSONException e) { + System.err.print("JSONException: " + e.getMessage()); + } + + return response; + } + + public JSONArray getJsonArray() { + + JSONArray response = null; + + try { + response = new JSONArray(getJsonString()); + } catch (JSONException e) { + System.err.print("JSONException: " + e.getMessage()); + } + + return response; + } + +} \ No newline at end of file diff --git a/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/Tip.java b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/Tip.java new file mode 100644 index 0000000..fa2abd7 --- /dev/null +++ 
b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/Tip.java @@ -0,0 +1,33 @@ +package com.opdup.btcrserviceclient; + +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; + +public class Tip { + + private JSONArray allTxs; + + public Tip(JSONArray allTxs){ + this.allTxs = allTxs; + } + + public boolean followTip(String txid) { + try { + for (int i = 1; i < this.allTxs.length(); i++) { + JSONObject tx = this.allTxs.getJSONObject(i).getJSONObject("Transaction"); + JSONArray vin = tx.getJSONArray("vin"); + String txidVin = vin.getJSONObject(0).getString("txid"); + + if (txid.equals(txidVin)) { + return true; //Transaction has been spent + } + + } + } catch (JSONException e) { + System.err.print("JSONException: " + e.getMessage()); + } + return false; + } + +} \ No newline at end of file diff --git a/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/TxDetails.java b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/TxDetails.java new file mode 100644 index 0000000..623395e --- /dev/null +++ b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/TxDetails.java @@ -0,0 +1,45 @@ +package com.opdup.btcrserviceclient; + +import android.util.Pair; +import org.json.JSONException; +import org.json.JSONObject; + +import java.net.URL; + +public class TxDetails { + + private URL url; + + private JSONObject jsonObject; + private String txId; + private Integer utxoIndex; + + public TxDetails(URL url) { + this.url = url; + this.jsonObject = new ServiceConnection(this.url).getJsonObject(); + } + + private Pair getTxDetails() { + + try { + this.txId = jsonObject.getString("txid"); + this.utxoIndex = jsonObject.getInt("utxo_index"); + } catch (JSONException e) { + System.err.print("JSONException: " + e.getMessage()); + } + + return new Pair<>(txId, utxoIndex); + + } + + //Get TxID + public String getTxId() { + return getTxDetails().first; + } + + //get UTXO index + public int getUtxoIndex() { + return getTxDetails().second; + } + +} \ No newline at end of file diff --git a/app/src/main/java/com/opdup/btcrserviceclient/Tip.java b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/UtxosForAddress.java similarity index 60% rename from app/src/main/java/com/opdup/btcrserviceclient/Tip.java rename to btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/UtxosForAddress.java index a6e1d7d..3db524e 100644 --- a/app/src/main/java/com/opdup/btcrserviceclient/Tip.java +++ b/btcrserviceclient/src/main/java/com/opdup/btcrserviceclient/UtxosForAddress.java @@ -1,18 +1,17 @@ package com.opdup.btcrserviceclient; -import java.io.IOException; import java.net.URL; -public class Tip { +public class UtxosForAddress { private URL url; - public Tip(URL url){ + public UtxosForAddress(URL url) { this.url = url; } - public String getTip() throws IOException{ + public String getUtxos() { return new ServiceConnection(this.url).getJsonString(); } -} \ No newline at end of file +} diff --git a/btcrserviceclient/src/main/res/values/strings.xml b/btcrserviceclient/src/main/res/values/strings.xml new file mode 100644 index 0000000..e3c1a93 --- /dev/null +++ b/btcrserviceclient/src/main/res/values/strings.xml @@ -0,0 +1,3 @@ + + btcrdidserviceclient + diff --git a/btcrserviceclient/src/test/java/com/opdup/btcrdidserviceclient/ExampleUnitTest.java b/btcrserviceclient/src/test/java/com/opdup/btcrdidserviceclient/ExampleUnitTest.java new file mode 100644 index 0000000..edbc504 --- /dev/null +++ 
b/btcrserviceclient/src/test/java/com/opdup/btcrdidserviceclient/ExampleUnitTest.java @@ -0,0 +1,17 @@ +package com.opdup.btcrdidserviceclient; + +import org.junit.Test; + +import static org.junit.Assert.*; + +/** + * Example local unit test, which will execute on the development machine (host). + * + * @see Testing documentation + */ +public class ExampleUnitTest { + @Test + public void addition_isCorrect() { + assertEquals(4, 2 + 2); + } +} \ No newline at end of file diff --git a/build.gradle b/build.gradle index 13c49a1..4ec532d 100644 --- a/build.gradle +++ b/build.gradle @@ -1,17 +1,18 @@ // Top-level build file where you can add configuration options common to all sub-projects/modules. buildscript { - + ext.kotlin_version = '1.3.31' repositories { google() jcenter() + } dependencies { - classpath 'com.android.tools.build:gradle:3.2.1' - - + classpath 'com.android.tools.build:gradle:3.4.2' + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" // NOTE: Do not place your application dependencies here; they belong // in the individual module build.gradle files + } } @@ -19,10 +20,10 @@ allprojects { repositories { google() jcenter() - mavenCentral() + } } task clean(type: Delete) { delete rootProject.buildDir -} \ No newline at end of file +} diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 9a4163a..c4486d4 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-4.6-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/jsonldjava/.gitignore b/jsonldjava/.gitignore new file mode 100644 index 0000000..796b96d --- /dev/null +++ b/jsonldjava/.gitignore @@ -0,0 +1 @@ +/build diff --git a/jsonldjava/build.gradle b/jsonldjava/build.gradle new file mode 100644 index 0000000..a27a7f5 --- /dev/null +++ b/jsonldjava/build.gradle @@ -0,0 +1,36 @@ +apply plugin: 'com.android.library' + +android { + compileSdkVersion 28 + + + defaultConfig { + minSdkVersion 19 + targetSdkVersion 28 + versionCode 1 + versionName "1.0" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + } + } + +} + +dependencies { + implementation fileTree(dir: 'libs', include: ['*.jar']) + + implementation 'androidx.appcompat:appcompat:1.0.2' + testImplementation 'junit:junit:4.12' + androidTestImplementation 'androidx.test:runner:1.2.0' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0' + implementation 'com.fasterxml.jackson.core:jackson-core:2.9.8' + implementation 'com.fasterxml.jackson.core:jackson-annotations:2.9.8' + implementation 'com.fasterxml.jackson.core:jackson-databind:2.9.8' +} diff --git a/jsonldjava/proguard-rules.pro b/jsonldjava/proguard-rules.pro new file mode 100644 index 0000000..f1b4245 --- /dev/null +++ b/jsonldjava/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. 
+# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. +#-renamesourcefileattribute SourceFile diff --git a/jsonldjava/src/androidTest/java/com/opdup/jsonldjava/ExampleInstrumentedTest.java b/jsonldjava/src/androidTest/java/com/opdup/jsonldjava/ExampleInstrumentedTest.java new file mode 100644 index 0000000..534630e --- /dev/null +++ b/jsonldjava/src/androidTest/java/com/opdup/jsonldjava/ExampleInstrumentedTest.java @@ -0,0 +1,26 @@ +package com.opdup.jsonldjava; + +import android.content.Context; +import androidx.test.InstrumentationRegistry; +import androidx.test.runner.AndroidJUnit4; + +import org.junit.Test; +import org.junit.runner.RunWith; + +import static org.junit.Assert.*; + +/** + * Instrumented test, which will execute on an Android device. + * + * @see Testing documentation + */ +@RunWith(AndroidJUnit4.class) +public class ExampleInstrumentedTest { + @Test + public void useAppContext() { + // Context of the app under test. + Context appContext = InstrumentationRegistry.getTargetContext(); + + assertEquals("com.opdup.jsonldjava.test", appContext.getPackageName()); + } +} diff --git a/jsonldjava/src/main/AndroidManifest.xml b/jsonldjava/src/main/AndroidManifest.xml new file mode 100644 index 0000000..cb6f109 --- /dev/null +++ b/jsonldjava/src/main/AndroidManifest.xml @@ -0,0 +1,2 @@ + diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/Context.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/Context.java new file mode 100644 index 0000000..9c1f0a0 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/Context.java @@ -0,0 +1,1197 @@ +package com.github.jsonldjava.core; + +import com.github.jsonldjava.core.JsonLdError.Error; +import com.github.jsonldjava.utils.JsonLdUrl; +import com.github.jsonldjava.utils.Obj; + +import java.util.*; + +import static com.github.jsonldjava.core.JsonLdUtils.compareShortestLeast; +import static com.github.jsonldjava.utils.Obj.newMap; + +/** + * A helper class which still stores all the values in a map but gives member + * variables easily access certain keys + * + * @author tristan + * + */ +public class Context extends LinkedHashMap { + + //private static final long serialVersionUID = 2894534897574805571L; + + private JsonLdOptions options; + private Map termDefinitions; + public Map inverse = null; + + public Context() { + this(new JsonLdOptions()); + } + + public Context(JsonLdOptions opts) { + super(); + init(opts); + } + + public Context(Map map, JsonLdOptions opts) { + super(map); + checkEmptyKey(map); + init(opts); + } + + public Context(Map map) { + super(map); + checkEmptyKey(map); + init(new JsonLdOptions()); + } + + public Context(Object context, JsonLdOptions opts) { + // TODO: load remote context + super(context instanceof Map ? 
(Map) context : null); + init(opts); + } + + private void init(JsonLdOptions options) { + this.options = options; + if (options.getBase() != null) { + this.put(JsonLdConsts.BASE, options.getBase()); + } + this.termDefinitions = newMap(); + } + + /** + * Value Compaction Algorithm + * + * http://json-ld.org/spec/latest/json-ld-api/#value-compaction + * + * @param activeProperty + * The Active Property + * @param value + * The value to compact + * @return The compacted value + */ + public Object compactValue(String activeProperty, Map value) { + // 1) + int numberMembers = value.size(); + // 2) + if (value.containsKey(JsonLdConsts.INDEX) + && JsonLdConsts.INDEX.equals(this.getContainer(activeProperty))) { + numberMembers--; + } + // 3) + if (numberMembers > 2) { + return value; + } + // 4) + final String typeMapping = getTypeMapping(activeProperty); + final String languageMapping = getLanguageMapping(activeProperty); + if (value.containsKey(JsonLdConsts.ID)) { + // 4.1) + if (numberMembers == 1 && JsonLdConsts.ID.equals(typeMapping)) { + return compactIri((String) value.get(JsonLdConsts.ID)); + } + // 4.2) + if (numberMembers == 1 && JsonLdConsts.VOCAB.equals(typeMapping)) { + return compactIri((String) value.get(JsonLdConsts.ID), true); + } + // 4.3) + return value; + } + final Object valueValue = value.get(JsonLdConsts.VALUE); + // 5) + if (value.containsKey(JsonLdConsts.TYPE) + && Obj.equals(value.get(JsonLdConsts.TYPE), typeMapping)) { + return valueValue; + } + // 6) + if (value.containsKey(JsonLdConsts.LANGUAGE)) { + // TODO: SPEC: doesn't specify to check default language as well + if (Obj.equals(value.get(JsonLdConsts.LANGUAGE), languageMapping) || Obj + .equals(value.get(JsonLdConsts.LANGUAGE), this.get(JsonLdConsts.LANGUAGE))) { + return valueValue; + } + } + // 7) + if (numberMembers == 1 && (!(valueValue instanceof String) + || !this.containsKey(JsonLdConsts.LANGUAGE) + || (termDefinitions.containsKey(activeProperty) + && getTermDefinition(activeProperty).containsKey(JsonLdConsts.LANGUAGE) + && languageMapping == null))) { + return valueValue; + } + // 8) + return value; + } + + /** + * Context Processing Algorithm + * + * http://json-ld.org/spec/latest/json-ld-api/#context-processing-algorithms + * + * @param localContext + * The Local Context object. + * @param remoteContexts + * The list of Strings denoting the remote Context URLs. + * @return The parsed and merged Context. + * @throws JsonLdError + * If there is an error parsing the contexts. + */ + @SuppressWarnings("unchecked") + public Context parse(Object localContext, List remoteContexts) throws JsonLdError { + return parse(localContext, remoteContexts, false); + } + + /** + * Helper method used to work around logic errors related to the recursive + * nature of the JSONLD-API Context Processing Algorithm. + * + * @param localContext + * The Local Context object. + * @param remoteContexts + * The list of Strings denoting the remote Context URLs. + * @param parsingARemoteContext + * True if localContext represents a remote context that has been + * parsed and sent into this method and false otherwise. This + * must be set to know whether to propagate the @code{@base} key + * from the context to the result. + * @return The parsed and merged Context. + * @throws JsonLdError + * If there is an error parsing the contexts. + */ + private Context parse(Object localContext, List remoteContexts, + boolean parsingARemoteContext) throws JsonLdError { + if (remoteContexts == null) { + remoteContexts = new ArrayList(); + } + // 1. 
Initialize result to the result of cloning active context. + Context result = this.clone(); // TODO: clone? + // 2) + if (!(localContext instanceof List)) { + final Object temp = localContext; + localContext = new ArrayList(); + ((List) localContext).add(temp); + } + // 3) + for (final Object context : ((List) localContext)) { + // 3.1) + if (context == null) { + result = new Context(this.options); + continue; + } else if (context instanceof Context) { + result = ((Context) context).clone(); + } + // 3.2) + else if (context instanceof String) { + String uri = (String) result.get(JsonLdConsts.BASE); + uri = JsonLdUrl.resolve(uri, (String) context); + // 3.2.2 + if (remoteContexts.contains(uri)) { + throw new JsonLdError(Error.RECURSIVE_CONTEXT_INCLUSION, uri); + } + remoteContexts.add(uri); + + // 3.2.3: Dereference context + final RemoteDocument rd = this.options.getDocumentLoader().loadDocument(uri); + final Object remoteContext = rd.getDocument(); + if (!(remoteContext instanceof Map) || !((Map) remoteContext) + .containsKey(JsonLdConsts.CONTEXT)) { + // If the dereferenced document has no top-level JSON object + // with an @context member + throw new JsonLdError(Error.INVALID_REMOTE_CONTEXT, context); + } + final Object tempContext = ((Map) remoteContext) + .get(JsonLdConsts.CONTEXT); + + // 3.2.4 + result = result.parse(tempContext, remoteContexts, true); + // 3.2.5 + continue; + } else if (!(context instanceof Map)) { + // 3.3 + throw new JsonLdError(Error.INVALID_LOCAL_CONTEXT, context); + } + checkEmptyKey((Map) context); + // 3.4 + if (!parsingARemoteContext + && ((Map) context).containsKey(JsonLdConsts.BASE)) { + // 3.4.1 + final Object value = ((Map) context).get(JsonLdConsts.BASE); + // 3.4.2 + if (value == null) { + result.remove(JsonLdConsts.BASE); + } else if (value instanceof String) { + // 3.4.3 + if (JsonLdUtils.isAbsoluteIri((String) value)) { + result.put(JsonLdConsts.BASE, value); + } else { + // 3.4.4 + final String baseUri = (String) result.get(JsonLdConsts.BASE); + if (!JsonLdUtils.isAbsoluteIri(baseUri)) { + throw new JsonLdError(Error.INVALID_BASE_IRI, baseUri); + } + result.put(JsonLdConsts.BASE, JsonLdUrl.resolve(baseUri, (String) value)); + } + } else { + // 3.4.5 + throw new JsonLdError(JsonLdError.Error.INVALID_BASE_IRI, + "@base must be a string"); + } + } + + // 3.5 + if (((Map) context).containsKey(JsonLdConsts.VOCAB)) { + final Object value = ((Map) context).get(JsonLdConsts.VOCAB); + if (value == null) { + result.remove(JsonLdConsts.VOCAB); + } else if (value instanceof String) { + if (JsonLdUtils.isAbsoluteIri((String) value)) { + result.put(JsonLdConsts.VOCAB, value); + } else { + throw new JsonLdError(Error.INVALID_VOCAB_MAPPING, + "@value must be an absolute IRI"); + } + } else { + throw new JsonLdError(Error.INVALID_VOCAB_MAPPING, + "@vocab must be a string or null"); + } + } + + // 3.6 + if (((Map) context).containsKey(JsonLdConsts.LANGUAGE)) { + final Object value = ((Map) context).get(JsonLdConsts.LANGUAGE); + if (value == null) { + result.remove(JsonLdConsts.LANGUAGE); + } else if (value instanceof String) { + result.put(JsonLdConsts.LANGUAGE, ((String) value).toLowerCase()); + } else { + throw new JsonLdError(Error.INVALID_DEFAULT_LANGUAGE, value); + } + } + + // 3.7 + final Map defined = new LinkedHashMap(); + for (final String key : ((Map) context).keySet()) { + if (JsonLdConsts.BASE.equals(key) || JsonLdConsts.VOCAB.equals(key) + || JsonLdConsts.LANGUAGE.equals(key)) { + continue; + } + result.createTermDefinition((Map) context, key, 
defined); + } + } + return result; + } + + private void checkEmptyKey(final Map map) { + if (map.containsKey("")) { + // the term MUST NOT be an empty string ("") + // https://www.w3.org/TR/json-ld/#h3_terms + throw new JsonLdError(Error.INVALID_TERM_DEFINITION, + String.format("empty key for value '%s'", map.get(""))); + } + } + + public Context parse(Object localContext) throws JsonLdError { + return this.parse(localContext, new ArrayList()); + } + + /** + * Create Term Definition Algorithm + * + * http://json-ld.org/spec/latest/json-ld-api/#create-term-definition + * + * @param context + * @param term + * @param defined + * @throws JsonLdError + */ + private void createTermDefinition(Map context, String term, + Map defined) throws JsonLdError { + if (defined.containsKey(term)) { + if (Boolean.TRUE.equals(defined.get(term))) { + return; + } + throw new JsonLdError(Error.CYCLIC_IRI_MAPPING, term); + } + + defined.put(term, false); + + if (JsonLdUtils.isKeyword(term) + /*&& !(options.getAllowContainerSetOnType() && JsonLdConsts.TYPE.equals(term) + && !(context.get(term)).toString().contains(JsonLdConsts.ID))*/) { + throw new JsonLdError(Error.KEYWORD_REDEFINITION, term); + } + + this.termDefinitions.remove(term); + Object value = context.get(term); + if (value == null || (value instanceof Map + && ((Map) value).containsKey(JsonLdConsts.ID) + && ((Map) value).get(JsonLdConsts.ID) == null)) { + this.termDefinitions.put(term, null); + defined.put(term, true); + return; + } + + if (value instanceof String) { + value = newMap(JsonLdConsts.ID, value); + } + + if (!(value instanceof Map)) { + throw new JsonLdError(Error.INVALID_TERM_DEFINITION, value); + } + + // casting the value so it doesn't have to be done below everytime + final Map val = (Map) value; + + // 9) create a new term definition + final Map definition = newMap(); + + // 10) + if (val.containsKey(JsonLdConsts.TYPE)) { + if (!(val.get(JsonLdConsts.TYPE) instanceof String)) { + throw new JsonLdError(Error.INVALID_TYPE_MAPPING, val.get(JsonLdConsts.TYPE)); + } + String type = (String) val.get(JsonLdConsts.TYPE); + try { + type = this.expandIri((String) val.get(JsonLdConsts.TYPE), false, true, context, + defined); + } catch (final JsonLdError error) { + if (error.getType() != Error.INVALID_IRI_MAPPING) { + throw error; + } + throw new JsonLdError(Error.INVALID_TYPE_MAPPING, type, error); + } + // TODO: fix check for absoluteIri (blank nodes shouldn't count, at + // least not here!) + if (JsonLdConsts.ID.equals(type) || JsonLdConsts.VOCAB.equals(type) + || (!type.startsWith(JsonLdConsts.BLANK_NODE_PREFIX) + && JsonLdUtils.isAbsoluteIri(type))) { + definition.put(JsonLdConsts.TYPE, type); + } else { + throw new JsonLdError(Error.INVALID_TYPE_MAPPING, type); + } + } + + // 11) + if (val.containsKey(JsonLdConsts.REVERSE)) { + if (val.containsKey(JsonLdConsts.ID)) { + throw new JsonLdError(Error.INVALID_REVERSE_PROPERTY, val); + } + if (!(val.get(JsonLdConsts.REVERSE) instanceof String)) { + throw new JsonLdError(Error.INVALID_IRI_MAPPING, + "Expected String for @reverse value. got " + + (val.get(JsonLdConsts.REVERSE) == null ? 
"null" + : val.get(JsonLdConsts.REVERSE).getClass())); + } + final String reverse = this.expandIri((String) val.get(JsonLdConsts.REVERSE), false, + true, context, defined); + if (!JsonLdUtils.isAbsoluteIri(reverse)) { + throw new JsonLdError(Error.INVALID_IRI_MAPPING, + "Non-absolute @reverse IRI: " + reverse); + } + definition.put(JsonLdConsts.ID, reverse); + if (val.containsKey(JsonLdConsts.CONTAINER)) { + final String container = (String) val.get(JsonLdConsts.CONTAINER); + if (container == null || JsonLdConsts.SET.equals(container) + || JsonLdConsts.INDEX.equals(container)) { + definition.put(JsonLdConsts.CONTAINER, container); + } else { + throw new JsonLdError(Error.INVALID_REVERSE_PROPERTY, + "reverse properties only support set- and index-containers"); + } + } + definition.put(JsonLdConsts.REVERSE, true); + this.termDefinitions.put(term, definition); + defined.put(term, true); + return; + } + + // 12) + definition.put(JsonLdConsts.REVERSE, false); + + // 13) + if (val.get(JsonLdConsts.ID) != null && !term.equals(val.get(JsonLdConsts.ID))) { + if (!(val.get(JsonLdConsts.ID) instanceof String)) { + throw new JsonLdError(Error.INVALID_IRI_MAPPING, + "expected value of @id to be a string"); + } + + final String res = this.expandIri((String) val.get(JsonLdConsts.ID), false, true, + context, defined); + if (JsonLdUtils.isKeyword(res) || JsonLdUtils.isAbsoluteIri(res)) { + if (JsonLdConsts.CONTEXT.equals(res)) { + throw new JsonLdError(Error.INVALID_KEYWORD_ALIAS, "cannot alias @context"); + } + definition.put(JsonLdConsts.ID, res); + } else { + throw new JsonLdError(Error.INVALID_IRI_MAPPING, + "resulting IRI mapping should be a keyword, absolute IRI or blank node"); + } + } + + // 14) + else if (term.indexOf(":") >= 0) { + final int colIndex = term.indexOf(":"); + final String prefix = term.substring(0, colIndex); + final String suffix = term.substring(colIndex + 1); + if (context.containsKey(prefix)) { + this.createTermDefinition(context, prefix, defined); + } + if (termDefinitions.containsKey(prefix)) { + definition.put(JsonLdConsts.ID, + ((Map) termDefinitions.get(prefix)).get(JsonLdConsts.ID) + + suffix); + } else { + definition.put(JsonLdConsts.ID, term); + } + // 15) + } else if (this.containsKey(JsonLdConsts.VOCAB)) { + definition.put(JsonLdConsts.ID, this.get(JsonLdConsts.VOCAB) + term); + } else if (!JsonLdConsts.TYPE.equals(term)) { + throw new JsonLdError(Error.INVALID_IRI_MAPPING, + "relative term definition without vocab mapping"); + } + + // 16) + if (val.containsKey(JsonLdConsts.CONTAINER)) { + final String container = (String) val.get(JsonLdConsts.CONTAINER); + if (!JsonLdConsts.LIST.equals(container) && !JsonLdConsts.SET.equals(container) + && !JsonLdConsts.INDEX.equals(container) + && !JsonLdConsts.LANGUAGE.equals(container)) { + throw new JsonLdError(Error.INVALID_CONTAINER_MAPPING, + "@container must be either @list, @set, @index, or @language"); + } + definition.put(JsonLdConsts.CONTAINER, container); + if (JsonLdConsts.TYPE.equals(term)) { + definition.put(JsonLdConsts.ID, "type"); + } + } + + // 17) + if (val.containsKey(JsonLdConsts.LANGUAGE) && !val.containsKey(JsonLdConsts.TYPE)) { + if (val.get(JsonLdConsts.LANGUAGE) == null + || val.get(JsonLdConsts.LANGUAGE) instanceof String) { + final String language = (String) val.get(JsonLdConsts.LANGUAGE); + definition.put(JsonLdConsts.LANGUAGE, + language != null ? 
language.toLowerCase() : null); + } else { + throw new JsonLdError(Error.INVALID_LANGUAGE_MAPPING, + "@language must be a string or null"); + } + } + + // 18) + this.termDefinitions.put(term, definition); + defined.put(term, true); + } + + /** + * IRI Expansion Algorithm + * + * http://json-ld.org/spec/latest/json-ld-api/#iri-expansion + * + * @param value + * @param relative + * @param vocab + * @param context + * @param defined + * @return + * @throws JsonLdError + */ + String expandIri(String value, boolean relative, boolean vocab, Map context, + Map defined) throws JsonLdError { + // 1) + if (value == null || JsonLdUtils.isKeyword(value)) { + return value; + } + // 2) + if (context != null && context.containsKey(value) + && !Boolean.TRUE.equals(defined.get(value))) { + this.createTermDefinition(context, value, defined); + } + // 3) + if (vocab && this.termDefinitions.containsKey(value)) { + final Map td = (LinkedHashMap) this.termDefinitions + .get(value); + if (td != null) { + return (String) td.get(JsonLdConsts.ID); + } else { + return null; + } + } + // 4) + final int colIndex = value.indexOf(":"); + if (colIndex >= 0) { + // 4.1) + final String prefix = value.substring(0, colIndex); + final String suffix = value.substring(colIndex + 1); + // 4.2) + if ("_".equals(prefix) || suffix.startsWith("//")) { + return value; + } + // 4.3) + if (context != null && context.containsKey(prefix) + && (!defined.containsKey(prefix) || defined.get(prefix) == false)) { + this.createTermDefinition(context, prefix, defined); + } + // 4.4) + if (this.termDefinitions.containsKey(prefix)) { + return (String) ((Map) this.termDefinitions.get(prefix)) + .get(JsonLdConsts.ID) + suffix; + } + // 4.5) + return value; + } + // 5) + if (vocab && this.containsKey(JsonLdConsts.VOCAB)) { + return this.get(JsonLdConsts.VOCAB) + value; + } + // 6) + else if (relative) { + return JsonLdUrl.resolve((String) this.get(JsonLdConsts.BASE), value); + } else if (context != null && JsonLdUtils.isRelativeIri(value)) { + throw new JsonLdError(Error.INVALID_IRI_MAPPING, "not an absolute IRI: " + value); + } + // 7) + return value; + } + + /** + * IRI Compaction Algorithm + * + * http://json-ld.org/spec/latest/json-ld-api/#iri-compaction + * + * Compacts an IRI or keyword into a term or prefix if it can be. If the IRI + * has an associated value it may be passed. + * + * @param iri + * the IRI to compact. + * @param value + * the value to check or null. + * @param relativeToVocab + * options for how to compact IRIs: vocab: true to split + * after @vocab, false not to. + * @param reverse + * true if a reverse property is being compacted, false if not. + * + * @return the compacted term, prefix, keyword alias, or the original IRI. 
+ */ + String compactIri(String iri, Object value, boolean relativeToVocab, boolean reverse) { + // 1) + if (iri == null) { + return null; + } + + // 2) + if (relativeToVocab && getInverse().containsKey(iri)) { + // 2.1) + String defaultLanguage = (String) this.get(JsonLdConsts.LANGUAGE); + if (defaultLanguage == null) { + defaultLanguage = JsonLdConsts.NONE; + } + + // 2.2) + final List containers = new ArrayList(); + // 2.3) + String typeLanguage = JsonLdConsts.LANGUAGE; + String typeLanguageValue = JsonLdConsts.NULL; + + // 2.4) + if (value instanceof Map + && ((Map) value).containsKey(JsonLdConsts.INDEX)) { + containers.add(JsonLdConsts.INDEX); + } + + // 2.5) + if (reverse) { + typeLanguage = JsonLdConsts.TYPE; + typeLanguageValue = JsonLdConsts.REVERSE; + containers.add(JsonLdConsts.SET); + } + // 2.6) + else if (value instanceof Map + && ((Map) value).containsKey(JsonLdConsts.LIST)) { + // 2.6.1) + if (!((Map) value).containsKey(JsonLdConsts.INDEX)) { + containers.add(JsonLdConsts.LIST); + } + // 2.6.2) + final List list = (List) ((Map) value) + .get(JsonLdConsts.LIST); + // 2.6.3) + String commonLanguage = (list.size() == 0) ? defaultLanguage : null; + String commonType = null; + // 2.6.4) + for (final Object item : list) { + // 2.6.4.1) + String itemLanguage = JsonLdConsts.NONE; + String itemType = JsonLdConsts.NONE; + // 2.6.4.2) + if (JsonLdUtils.isValue(item)) { + // 2.6.4.2.1) + if (((Map) item).containsKey(JsonLdConsts.LANGUAGE)) { + itemLanguage = (String) ((Map) item) + .get(JsonLdConsts.LANGUAGE); + } + // 2.6.4.2.2) + else if (((Map) item).containsKey(JsonLdConsts.TYPE)) { + itemType = (String) ((Map) item).get(JsonLdConsts.TYPE); + } + // 2.6.4.2.3) + else { + itemLanguage = JsonLdConsts.NULL; + } + } + // 2.6.4.3) + else { + itemType = JsonLdConsts.ID; + } + // 2.6.4.4) + if (commonLanguage == null) { + commonLanguage = itemLanguage; + } + // 2.6.4.5) + else if (!commonLanguage.equals(itemLanguage) && JsonLdUtils.isValue(item)) { + commonLanguage = JsonLdConsts.NONE; + } + // 2.6.4.6) + if (commonType == null) { + commonType = itemType; + } + // 2.6.4.7) + else if (!commonType.equals(itemType)) { + commonType = JsonLdConsts.NONE; + } + // 2.6.4.8) + if (JsonLdConsts.NONE.equals(commonLanguage) + && JsonLdConsts.NONE.equals(commonType)) { + break; + } + } + // 2.6.5) + commonLanguage = (commonLanguage != null) ? commonLanguage : JsonLdConsts.NONE; + // 2.6.6) + commonType = (commonType != null) ? 
commonType : JsonLdConsts.NONE; + // 2.6.7) + if (!JsonLdConsts.NONE.equals(commonType)) { + typeLanguage = JsonLdConsts.TYPE; + typeLanguageValue = commonType; + } + // 2.6.8) + else { + typeLanguageValue = commonLanguage; + } + } + // 2.7) + else { + // 2.7.1) + if (value instanceof Map + && ((Map) value).containsKey(JsonLdConsts.VALUE)) { + // 2.7.1.1) + if (((Map) value).containsKey(JsonLdConsts.LANGUAGE) + && !((Map) value).containsKey(JsonLdConsts.INDEX)) { + containers.add(JsonLdConsts.LANGUAGE); + typeLanguageValue = (String) ((Map) value) + .get(JsonLdConsts.LANGUAGE); + } + // 2.7.1.2) + else if (((Map) value).containsKey(JsonLdConsts.TYPE)) { + typeLanguage = JsonLdConsts.TYPE; + typeLanguageValue = (String) ((Map) value) + .get(JsonLdConsts.TYPE); + } + } + // 2.7.2) + else { + typeLanguage = JsonLdConsts.TYPE; + typeLanguageValue = JsonLdConsts.ID; + } + // 2.7.3) + containers.add(JsonLdConsts.SET); + } + + // 2.8) + containers.add(JsonLdConsts.NONE); + // 2.9) + if (typeLanguageValue == null) { + typeLanguageValue = JsonLdConsts.NULL; + } + // 2.10) + final List preferredValues = new ArrayList(); + // 2.11) + if (JsonLdConsts.REVERSE.equals(typeLanguageValue)) { + preferredValues.add(JsonLdConsts.REVERSE); + } + // 2.12) + if ((JsonLdConsts.REVERSE.equals(typeLanguageValue) + || JsonLdConsts.ID.equals(typeLanguageValue)) && (value instanceof Map) + && ((Map) value).containsKey(JsonLdConsts.ID)) { + // 2.12.1) + final String result = this.compactIri( + (String) ((Map) value).get(JsonLdConsts.ID), null, true, + true); + if (termDefinitions.containsKey(result) + && ((Map) termDefinitions.get(result)) + .containsKey(JsonLdConsts.ID) + && ((Map) value).get(JsonLdConsts.ID) + .equals(((Map) termDefinitions.get(result)) + .get(JsonLdConsts.ID))) { + preferredValues.add(JsonLdConsts.VOCAB); + preferredValues.add(JsonLdConsts.ID); + } + // 2.12.2) + else { + preferredValues.add(JsonLdConsts.ID); + preferredValues.add(JsonLdConsts.VOCAB); + } + } + // 2.13) + else { + preferredValues.add(typeLanguageValue); + } + preferredValues.add(JsonLdConsts.NONE); + + // 2.14) + final String term = selectTerm(iri, containers, typeLanguage, preferredValues); + // 2.15) + if (term != null) { + return term; + } + } + + // 3) + if (relativeToVocab && this.containsKey(JsonLdConsts.VOCAB)) { + // determine if vocab is a prefix of the iri + final String vocab = (String) this.get(JsonLdConsts.VOCAB); + // 3.1) + if (iri.indexOf(vocab) == 0 && !iri.equals(vocab)) { + // use suffix as relative iri if it is not a term in the + // active context + final String suffix = iri.substring(vocab.length()); + if (!termDefinitions.containsKey(suffix)) { + return suffix; + } + } + } + + // 4) + String compactIRI = null; + // 5) + for (final String term : termDefinitions.keySet()) { + final Map termDefinition = (Map) termDefinitions + .get(term); + // 5.1) + if (term.contains(":")) { + continue; + } + // 5.2) + if (termDefinition == null || iri.equals(termDefinition.get(JsonLdConsts.ID)) + || !iri.startsWith((String) termDefinition.get(JsonLdConsts.ID))) { + continue; + } + + // 5.3) + final String candidate = term + ":" + + iri.substring(((String) termDefinition.get(JsonLdConsts.ID)).length()); + // 5.4) + compactIRI = _iriCompactionStep5point4(iri, value, compactIRI, candidate, + termDefinitions); + } + + // 6) + if (compactIRI != null) { + return compactIRI; + } + + // 7) + if (!relativeToVocab) { + return JsonLdUrl.removeBase(this.get(JsonLdConsts.BASE), iri); + } + + // 8) + return iri; + } + + /* + * This method 
is only visible for testing. + */ + public static String _iriCompactionStep5point4(String iri, Object value, String compactIRI, + final String candidate, Map termDefinitions) { + + final boolean condition1 = (compactIRI == null + || compareShortestLeast(candidate, compactIRI) < 0); + + final boolean condition2 = (!termDefinitions.containsKey(candidate) || (iri + .equals(((Map) termDefinitions.get(candidate)).get(JsonLdConsts.ID)) + && value == null)); + + if (condition1 && condition2) { + compactIRI = candidate; + } + return compactIRI; + } + + /** + * Return a map of potential RDF prefixes based on the JSON-LD Term + * Definitions in this context. + *
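+ * For example (illustrative): a term definition mapping "dc" to
+ * "http://purl.org/dc/terms/" yields the entry "dc" -> "http://purl.org/dc/terms/";
+ * with onlyCommonPrefixes set to true it is kept because the IRI ends with "/".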

+ * No guarantees of the prefixes are given, beyond that it will not contain + * ":". + * + * @param onlyCommonPrefixes + * If true, the result will not include "not so + * useful" prefixes, such as "term1": "http://example.com/term1", + * e.g. all IRIs will end with "/" or "#". If false, + * all potential prefixes are returned. + * + * @return A map from prefix string to IRI string + */ + public Map getPrefixes(boolean onlyCommonPrefixes) { + final Map prefixes = new LinkedHashMap(); + for (final String term : termDefinitions.keySet()) { + if (term.contains(":")) { + continue; + } + final Map termDefinition = (Map) termDefinitions + .get(term); + if (termDefinition == null) { + continue; + } + final String id = (String) termDefinition.get(JsonLdConsts.ID); + if (id == null) { + continue; + } + if (term.startsWith("@") || id.startsWith("@")) { + continue; + } + if (!onlyCommonPrefixes || id.endsWith("/") || id.endsWith("#")) { + prefixes.put(term, id); + } + } + return prefixes; + } + + String compactIri(String iri, boolean relativeToVocab) { + return compactIri(iri, null, relativeToVocab, false); + } + + String compactIri(String iri) { + return compactIri(iri, null, false, false); + } + + @Override + public Context clone() { + final Context rval = (Context) super.clone(); + // TODO: is this shallow copy enough? probably not, but it passes all + // the tests! + rval.termDefinitions = new LinkedHashMap(this.termDefinitions); + return rval; + } + + /** + * Inverse Context Creation + * + * http://json-ld.org/spec/latest/json-ld-api/#inverse-context-creation + * + * Generates an inverse context for use in the compaction algorithm, if not + * already generated for the given active context. + * + * @return the inverse context. + */ + public Map getInverse() { + + // lazily create inverse + if (inverse != null) { + return inverse; + } + + // 1) + inverse = newMap(); + + // 2) + String defaultLanguage = (String) this.get(JsonLdConsts.LANGUAGE); + if (defaultLanguage == null) { + defaultLanguage = JsonLdConsts.NONE; + } + + // create term selections for each mapping in the context, ordererd by + // shortest and then lexicographically least + final List terms = new ArrayList(termDefinitions.keySet()); + Collections.sort(terms, new Comparator() { + @Override + public int compare(String a, String b) { + return compareShortestLeast(a, b); + } + }); + + for (final String term : terms) { + final Map definition = (Map) termDefinitions.get(term); + // 3.1) + if (definition == null) { + continue; + } + + // 3.2) + String container = (String) definition.get(JsonLdConsts.CONTAINER); + if (container == null) { + container = JsonLdConsts.NONE; + } + + // 3.3) + final String iri = (String) definition.get(JsonLdConsts.ID); + + // 3.4 + 3.5) + Map containerMap = (Map) inverse.get(iri); + if (containerMap == null) { + containerMap = newMap(); + inverse.put(iri, containerMap); + } + + // 3.6 + 3.7) + Map typeLanguageMap = (Map) containerMap.get(container); + if (typeLanguageMap == null) { + typeLanguageMap = newMap(); + typeLanguageMap.put(JsonLdConsts.LANGUAGE, newMap()); + typeLanguageMap.put(JsonLdConsts.TYPE, newMap()); + containerMap.put(container, typeLanguageMap); + } + + // 3.8) + if (Boolean.TRUE.equals(definition.get(JsonLdConsts.REVERSE))) { + final Map typeMap = (Map) typeLanguageMap + .get(JsonLdConsts.TYPE); + if (!typeMap.containsKey(JsonLdConsts.REVERSE)) { + typeMap.put(JsonLdConsts.REVERSE, term); + } + // 3.9) + } else if (definition.containsKey(JsonLdConsts.TYPE)) { + final Map typeMap = (Map) 
typeLanguageMap + .get(JsonLdConsts.TYPE); + if (!typeMap.containsKey(definition.get(JsonLdConsts.TYPE))) { + typeMap.put((String) definition.get(JsonLdConsts.TYPE), term); + } + // 3.10) + } else if (definition.containsKey(JsonLdConsts.LANGUAGE)) { + final Map languageMap = (Map) typeLanguageMap + .get(JsonLdConsts.LANGUAGE); + String language = (String) definition.get(JsonLdConsts.LANGUAGE); + if (language == null) { + language = JsonLdConsts.NULL; + } + if (!languageMap.containsKey(language)) { + languageMap.put(language, term); + } + // 3.11) + } else { + // 3.11.1) + final Map languageMap = (Map) typeLanguageMap + .get(JsonLdConsts.LANGUAGE); + // 3.11.2) + if (!languageMap.containsKey(JsonLdConsts.LANGUAGE)) { + languageMap.put(JsonLdConsts.LANGUAGE, term); + } + // 3.11.3) + if (!languageMap.containsKey(JsonLdConsts.NONE)) { + languageMap.put(JsonLdConsts.NONE, term); + } + // 3.11.4) + final Map typeMap = (Map) typeLanguageMap + .get(JsonLdConsts.TYPE); + // 3.11.5) + if (!typeMap.containsKey(JsonLdConsts.NONE)) { + typeMap.put(JsonLdConsts.NONE, term); + } + } + } + // 4) + return inverse; + } + + /** + * Term Selection + * + * http://json-ld.org/spec/latest/json-ld-api/#term-selection + * + * This algorithm, invoked via the IRI Compaction algorithm, makes use of an + * active context's inverse context to find the term that is best used to + * compact an IRI. Other information about a value associated with the IRI + * is given, including which container mappings and which type mapping or + * language mapping would be best used to express the value. + * + * @return the selected term. + */ + private String selectTerm(String iri, List containers, String typeLanguage, + List preferredValues) { + final Map inv = getInverse(); + // 1) + final Map containerMap = (Map) inv.get(iri); + // 2) + for (final String container : containers) { + // 2.1) + if (!containerMap.containsKey(container)) { + continue; + } + // 2.2) + final Map typeLanguageMap = (Map) containerMap + .get(container); + // 2.3) + final Map valueMap = (Map) typeLanguageMap + .get(typeLanguage); + // 2.4 ) + for (final String item : preferredValues) { + // 2.4.1 + if (!valueMap.containsKey(item)) { + continue; + } + // 2.4.2 + return (String) valueMap.get(item); + } + } + // 3) + return null; + } + + /** + * Retrieve container mapping. + * + * @param property + * The Property to get a container mapping for. 
+ * @return The container mapping if any, else null + */ + public String getContainer(String property) { + if (property == null) { + return null; + } + if (JsonLdConsts.GRAPH.equals(property)) { + return JsonLdConsts.SET; + } + if (!property.equals(JsonLdConsts.TYPE) && JsonLdUtils.isKeyword(property)) { + return property; + } + final Map td = (Map) termDefinitions.get(property); + if (td == null) { + return null; + } + return (String) td.get(JsonLdConsts.CONTAINER); + } + + public Boolean isReverseProperty(String property) { + final Map td = (Map) termDefinitions.get(property); + if (td == null) { + return false; + } + final Object reverse = td.get(JsonLdConsts.REVERSE); + return reverse != null && (Boolean) reverse; + } + + public String getTypeMapping(String property) { + final Map td = (Map) termDefinitions.get(property); + if (td == null) { + return null; + } + return (String) td.get(JsonLdConsts.TYPE); + } + + public String getLanguageMapping(String property) { + final Map td = (Map) termDefinitions.get(property); + if (td == null) { + return null; + } + return (String) td.get(JsonLdConsts.LANGUAGE); + } + + Map getTermDefinition(String key) { + return ((Map) termDefinitions.get(key)); + } + + public Object expandValue(String activeProperty, Object value) throws JsonLdError { + final Map rval = newMap(); + final Map td = getTermDefinition(activeProperty); + // 1) + if (td != null && JsonLdConsts.ID.equals(td.get(JsonLdConsts.TYPE))) { + // TODO: i'm pretty sure value should be a string if the @type is + // @id + rval.put(JsonLdConsts.ID, expandIri(value.toString(), true, false, null, null)); + return rval; + } + // 2) + if (td != null && JsonLdConsts.VOCAB.equals(td.get(JsonLdConsts.TYPE))) { + // TODO: same as above + rval.put(JsonLdConsts.ID, expandIri(value.toString(), true, true, null, null)); + return rval; + } + // 3) + rval.put(JsonLdConsts.VALUE, value); + // 4) + if (td != null && td.containsKey(JsonLdConsts.TYPE)) { + rval.put(JsonLdConsts.TYPE, td.get(JsonLdConsts.TYPE)); + } + // 5) + else if (value instanceof String) { + // 5.1) + if (td != null && td.containsKey(JsonLdConsts.LANGUAGE)) { + final String lang = (String) td.get(JsonLdConsts.LANGUAGE); + if (lang != null) { + rval.put(JsonLdConsts.LANGUAGE, lang); + } + } + // 5.2) + else if (this.get(JsonLdConsts.LANGUAGE) != null) { + rval.put(JsonLdConsts.LANGUAGE, this.get(JsonLdConsts.LANGUAGE)); + } + } + return rval; + } + + public Map serialize() { + final Map ctx = newMap(); + if (this.get(JsonLdConsts.BASE) != null + && !this.get(JsonLdConsts.BASE).equals(options.getBase())) { + ctx.put(JsonLdConsts.BASE, this.get(JsonLdConsts.BASE)); + } + if (this.get(JsonLdConsts.LANGUAGE) != null) { + ctx.put(JsonLdConsts.LANGUAGE, this.get(JsonLdConsts.LANGUAGE)); + } + if (this.get(JsonLdConsts.VOCAB) != null) { + ctx.put(JsonLdConsts.VOCAB, this.get(JsonLdConsts.VOCAB)); + } + for (final String term : termDefinitions.keySet()) { + final Map definition = (Map) termDefinitions.get(term); + if (definition.get(JsonLdConsts.LANGUAGE) == null + && definition.get(JsonLdConsts.CONTAINER) == null + && definition.get(JsonLdConsts.TYPE) == null + && (definition.get(JsonLdConsts.REVERSE) == null + || Boolean.FALSE.equals(definition.get(JsonLdConsts.REVERSE)))) { + final String cid = this.compactIri((String) definition.get(JsonLdConsts.ID)); + ctx.put(term, term.equals(cid) ? 
definition.get(JsonLdConsts.ID) : cid); + } else { + final Map defn = newMap(); + final String cid = this.compactIri((String) definition.get(JsonLdConsts.ID)); + final Boolean reverseProperty = Boolean.TRUE + .equals(definition.get(JsonLdConsts.REVERSE)); + if (!(term.equals(cid) && !reverseProperty)) { + defn.put(reverseProperty ? JsonLdConsts.REVERSE : JsonLdConsts.ID, cid); + } + final String typeMapping = (String) definition.get(JsonLdConsts.TYPE); + if (typeMapping != null) { + defn.put(JsonLdConsts.TYPE, JsonLdUtils.isKeyword(typeMapping) ? typeMapping + : compactIri(typeMapping, true)); + } + if (definition.get(JsonLdConsts.CONTAINER) != null) { + defn.put(JsonLdConsts.CONTAINER, definition.get(JsonLdConsts.CONTAINER)); + } + final Object lang = definition.get(JsonLdConsts.LANGUAGE); + if (definition.get(JsonLdConsts.LANGUAGE) != null) { + defn.put(JsonLdConsts.LANGUAGE, Boolean.FALSE.equals(lang) ? null : lang); + } + ctx.put(term, defn); + } + } + + final Map rval = newMap(); + if (!(ctx == null || ctx.isEmpty())) { + rval.put(JsonLdConsts.CONTEXT, ctx); + } + return rval; + } + +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/DocumentLoader.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/DocumentLoader.java new file mode 100644 index 0000000..06f3866 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/DocumentLoader.java @@ -0,0 +1,58 @@ +package com.github.jsonldjava.core; + +import com.github.jsonldjava.utils.JsonUtils; + +import java.net.URL; + +/** + * Resolves URLs to {@link RemoteDocument}s. Subclass this class to change the + * behaviour of loadDocument to suit your purposes. + */ +public class DocumentLoader { + + //private final Map m_injectedDocs = new HashMap<>(); + + /** + * Identifies a system property that can be set to "true" in order to + * disallow remote context loading. + */ + public static final String DISALLOW_REMOTE_CONTEXT_LOADING = "com.github.jsonldjava.disallowRemoteContextLoading"; + + + /** + * Loads the URL if possible, returning it as a RemoteDocument. + * + * @param url + * The URL to load + * @return The resolved URL as a RemoteDocument + * @throws JsonLdError + * If there are errors loading or remote context loading has + * been disallowed. + */ + public RemoteDocument loadDocument(String url) throws JsonLdError { + + final String disallowRemote = System + .getProperty(DocumentLoader.DISALLOW_REMOTE_CONTEXT_LOADING); + + if ("true".equalsIgnoreCase(disallowRemote)) { + throw new JsonLdError(JsonLdError.Error.LOADING_REMOTE_CONTEXT_FAILED, "Remote context loading has been disallowed (url was " + url + ")"); + } + + final RemoteDocument doc = new RemoteDocument(url, null); + try { + doc.setDocument(JsonUtils.fromURL(new URL(url)/*, getHttpClient()*/)); + } catch (final Exception e) { + throw new JsonLdError(JsonLdError.Error.LOADING_REMOTE_CONTEXT_FAILED, url, e); + } + return doc; + } + + /** + * An HTTP Accept header that prefers JSONLD. + * + * @deprecated Use {@link JsonUtils#ACCEPT_HEADER} instead. 
+ */ + @Deprecated + public static final String ACCEPT_HEADER = JsonUtils.ACCEPT_HEADER; + +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/IdentifierIssuer.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/IdentifierIssuer.java new file mode 100644 index 0000000..f73212c --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/IdentifierIssuer.java @@ -0,0 +1,112 @@ +package com.github.jsonldjava.core; + + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/* + * An IdentifierIssuer issues unique identifiers, keeping track of any + * previously issued identifiers. + */ +public class IdentifierIssuer implements Cloneable { + + private String prefix; + private int counter; + private Map existing; + private List order; + + public IdentifierIssuer(String prefix) { + /* + * Initializes a new IdentifierIssuer. + * :param prefix: the prefix to use (''). + */ + + this.prefix = prefix; + this.counter = 0; + this.existing = new HashMap<>(); + this.order = new ArrayList<>(); + + /* + * Gets the new identifier for the given old identifier, where if no old + * identifier is given a new identifier will be generated. + * :param [old]: the old identifier to get the new identifier for. + * :return: the new identifier. + */ + } + + public String getId() { + return this.getId(null); + } + + public String getId(String old) { + + if(old != null && existing.containsKey(old)) { + return existing.get(old); + } + + String id = this.prefix + Integer.toString(counter); + this.counter += 1; + + if(old != null) { + /* + * Returns True if the given old identifier has already been assigned a + * new identifier. + * :param old: the old identifier to check. + * :return: True if the old identifier has been assigned a new identifier, + * False if not. + */ + + this.existing.put(old, id); + this.order.add(old); + } + + return id; + + } + + public boolean hasID (String old) { + return this.existing.containsKey(old); + + } + + public List getOrder() { + return this.order; + } + + public String getPrefix() { + return this.prefix; + } + + public Object clone() { + try { + return super.clone(); + } catch (CloneNotSupportedException e) { + e.printStackTrace(); + return null; + } + } +} + + + + + + + + + + + + + + + + + + + + + + diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdApi.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdApi.java new file mode 100644 index 0000000..3092d69 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdApi.java @@ -0,0 +1,2058 @@ +package com.github.jsonldjava.core; + +import com.github.jsonldjava.core.JsonLdError.Error; +import com.github.jsonldjava.utils.Obj; + +import java.util.*; +import java.util.logging.Logger; + +import static com.github.jsonldjava.core.JsonLdConsts.*; +import static com.github.jsonldjava.core.JsonLdUtils.isKeyword; +import static com.github.jsonldjava.utils.Obj.newMap; + + +public class JsonLdApi { + + private final Logger log = null; + + JsonLdOptions opts; + Object value = null; + Context context = null; + + /** + * Constructs an empty JsonLdApi object using the default JsonLdOptions, and + * without initialization. + */ + public JsonLdApi() { + this(new JsonLdOptions("")); + } + + /** + * Constructs a JsonLdApi object using the given object as the initial + * JSON-LD object, and the given JsonLdOptions. + * + * @param input The initial JSON-LD object. + * @param opts The JsonLdOptions to use. 
+ * @throws JsonLdError If there is an error initializing using the object and + * options. + */ + public JsonLdApi(Object input, JsonLdOptions opts) throws JsonLdError { + this(opts); + initialize(input, null); + } + + /** + * Constructs a JsonLdApi object using the given object as the initial + * JSON-LD object, the given context, and the given JsonLdOptions. + * + * @param input The initial JSON-LD object. + * @param context The initial context. + * @param opts The JsonLdOptions to use. + * @throws JsonLdError If there is an error initializing using the object and + * options. + */ + public JsonLdApi(Object input, Object context, JsonLdOptions opts) throws JsonLdError { + this(opts); + initialize(input, null); + } + + /** + * Constructs an empty JsonLdApi object using the given JsonLdOptions, and + * without initialization.
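+ * For example (illustrative): {@code new JsonLdApi(new JsonLdOptions(""))}.
+ *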
+ * If the JsonLdOptions parameter is null, then the default options are + * used. + * + * @param opts The JsonLdOptions to use. + */ + public JsonLdApi(JsonLdOptions opts) { + if (opts == null) { + opts = new JsonLdOptions(""); + } else { + this.opts = opts; + } + } + + /** + * @param input The initial object, which is to be cloned and used in + * operations. + * @param context The context object, which is to be parsed and used in + * operations. + * @throws JsonLdError If there was an error cloning the object, or in parsing the + * context. + */ + private void initialize(Object input, Object context) throws JsonLdError { + if (input instanceof List || input instanceof Map) { + this.value = JsonLdUtils.clone(input); + } + // TODO: string/IO input + this.context = new Context(opts); + if (context != null) { + this.context = this.context.parse(context); + } + } + + /*** + * ____ _ _ _ _ _ _ / ___|___ _ __ ___ _ __ __ _ ___| |_ / \ | | __ _ ___ _ + * __(_) |_| |__ _ __ ___ | | / _ \| '_ ` _ \| '_ \ / _` |/ __| __| / _ \ | + * |/ _` |/ _ \| '__| | __| '_ \| '_ ` _ \ | |__| (_) | | | | | | |_) | (_| + * | (__| |_ / ___ \| | (_| | (_) | | | | |_| | | | | | | | | \____\___/|_| + * |_| |_| .__/ \__,_|\___|\__| /_/ \_\_|\__, |\___/|_| |_|\__|_| |_|_| |_| + * |_| |_| |___/ + */ + + /** + * Compaction Algorithm + *
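+ * For example (illustrative): with an active context that maps "name" to
+ * "http://xmlns.com/foaf/0.1/name", the expanded element
+ * {"http://xmlns.com/foaf/0.1/name": [{"@value": "Bob"}]}
+ * compacts (with compactArrays true) to {"name": "Bob"}.
+ *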

+ * http://json-ld.org/spec/latest/json-ld-api/#compaction-algorithm + * + * @param activeCtx The Active Context + * @param activeProperty The Active Property + * @param element The current element + * @param compactArrays True to compact arrays. + * @return The compacted JSON-LD object. + * @throws JsonLdError If there was an error during compaction. + */ + public Object compact(Context activeCtx, String activeProperty, Object element, + boolean compactArrays) throws JsonLdError { + // 2) + if (element instanceof List) { + // 2.1) + final List result = new ArrayList(); + // 2.2) + for (final Object item : (List) element) { + // 2.2.1) + final Object compactedItem = compact(activeCtx, activeProperty, item, + compactArrays); + // 2.2.2) + if (compactedItem != null) { + result.add(compactedItem); + } + } + // 2.3) + if (compactArrays && result.size() == 1 + && activeCtx.getContainer(activeProperty) == null) { + return result.get(0); + } + // 2.4) + return result; + } + + // 3) + if (element instanceof Map) { + // access helper + final Map elem = (Map) element; + + // 4 + if (elem.containsKey(JsonLdConsts.VALUE) || elem.containsKey(JsonLdConsts.ID)) { + final Object compactedValue = activeCtx.compactValue(activeProperty, elem); + if (!(compactedValue instanceof Map || compactedValue instanceof List)) { + return compactedValue; + } + } + // 5) + final boolean insideReverse = (JsonLdConsts.REVERSE.equals(activeProperty)); + + // 6) + final Map result = newMap(); + // 7) + final List keys = new ArrayList(elem.keySet()); + Collections.sort(keys); + for (final String expandedProperty : keys) { + final Object expandedValue = elem.get(expandedProperty); + + // 7.1) + if (JsonLdConsts.ID.equals(expandedProperty) + || JsonLdConsts.TYPE.equals(expandedProperty)) { + Object compactedValue; + + // 7.1.1) + if (expandedValue instanceof String) { + compactedValue = activeCtx.compactIri((String) expandedValue, + JsonLdConsts.TYPE.equals(expandedProperty)); + } + // 7.1.2) + else { + final List types = new ArrayList(); + // 7.1.2.2) + for (final String expandedType : (List) expandedValue) { + types.add(activeCtx.compactIri(expandedType, true)); + } + // 7.1.2.3) + if (types.size() == 1) { + compactedValue = types.get(0); + } else { + compactedValue = types; + } + } + + // 7.1.3) + final String alias = activeCtx.compactIri(expandedProperty, true); + // 7.1.4) + result.put(alias, compactedValue); + continue; + // TODO: old add value code, see if it's still relevant? 
+ // addValue(rval, alias, compactedValue, + // isArray(compactedValue) + // && ((List) expandedValue).size() == 0); + } + + // 7.2) + if (JsonLdConsts.REVERSE.equals(expandedProperty)) { + // 7.2.1) + final Map compactedValue = (Map) compact( + activeCtx, JsonLdConsts.REVERSE, expandedValue, compactArrays); + + // 7.2.2) + // Note: Must create a new set to avoid modifying the set we + // are iterating over + for (final String property : new HashSet(compactedValue.keySet())) { + final Object value = compactedValue.get(property); + // 7.2.2.1) + if (activeCtx.isReverseProperty(property)) { + // 7.2.2.1.1) + if ((JsonLdConsts.SET.equals(activeCtx.getContainer(property)) + || !compactArrays) && !(value instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(value); + result.put(property, tmp); + } + // 7.2.2.1.2) + if (!result.containsKey(property)) { + result.put(property, value); + } + // 7.2.2.1.3) + else { + if (!(result.get(property) instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(result.put(property, tmp)); + } + if (value instanceof List) { + ((List) result.get(property)) + .addAll((List) value); + } else { + ((List) result.get(property)).add(value); + } + } + // 7.2.2.1.4) + compactedValue.remove(property); + } + } + // 7.2.3) + if (!compactedValue.isEmpty()) { + // 7.2.3.1) + final String alias = activeCtx.compactIri(JsonLdConsts.REVERSE, true); + // 7.2.3.2) + result.put(alias, compactedValue); + } + // 7.2.4) + continue; + } + + // 7.3) + if (JsonLdConsts.INDEX.equals(expandedProperty) + && JsonLdConsts.INDEX.equals(activeCtx.getContainer(activeProperty))) { + continue; + } + // 7.4) + else if (JsonLdConsts.INDEX.equals(expandedProperty) + || JsonLdConsts.VALUE.equals(expandedProperty) + || JsonLdConsts.LANGUAGE.equals(expandedProperty)) { + // 7.4.1) + final String alias = activeCtx.compactIri(expandedProperty, true); + // 7.4.2) + result.put(alias, expandedValue); + continue; + } + + // NOTE: expanded value must be an array due to expansion + // algorithm. + + // 7.5) + if (((List) expandedValue).size() == 0) { + // 7.5.1) + final String itemActiveProperty = activeCtx.compactIri(expandedProperty, + expandedValue, true, insideReverse); + // 7.5.2) + if (!result.containsKey(itemActiveProperty)) { + result.put(itemActiveProperty, new ArrayList()); + } else { + final Object value = result.get(itemActiveProperty); + if (!(value instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(value); + result.put(itemActiveProperty, tmp); + } + } + } + + // 7.6) + for (final Object expandedItem : (List) expandedValue) { + // 7.6.1) + final String itemActiveProperty = activeCtx.compactIri(expandedProperty, + expandedItem, true, insideReverse); + // 7.6.2) + final String container = activeCtx.getContainer(itemActiveProperty); + + // get @list value if appropriate + final boolean isList = (expandedItem instanceof Map + && ((Map) expandedItem).containsKey(JsonLdConsts.LIST)); + Object list = null; + if (isList) { + list = ((Map) expandedItem).get(JsonLdConsts.LIST); + } + + // 7.6.3) + Object compactedItem = compact(activeCtx, itemActiveProperty, + isList ? 
list : expandedItem, compactArrays); + + // 7.6.4) + if (isList) { + // 7.6.4.1) + if (!(compactedItem instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(compactedItem); + compactedItem = tmp; + } + // 7.6.4.2) + if (!JsonLdConsts.LIST.equals(container)) { + // 7.6.4.2.1) + final Map wrapper = newMap(); + // TODO: SPEC: no mention of vocab = true + wrapper.put(activeCtx.compactIri(JsonLdConsts.LIST, true), + compactedItem); + compactedItem = wrapper; + + // 7.6.4.2.2) + if (((Map) expandedItem) + .containsKey(JsonLdConsts.INDEX)) { + ((Map) compactedItem).put( + // TODO: SPEC: no mention of vocab = + // true + activeCtx.compactIri(JsonLdConsts.INDEX, true), + ((Map) expandedItem) + .get(JsonLdConsts.INDEX)); + } + } + // 7.6.4.3) + else if (result.containsKey(itemActiveProperty)) { + throw new JsonLdError(Error.COMPACTION_TO_LIST_OF_LISTS, + "There cannot be two list objects associated with an active property that has a container mapping"); + } + } + + // 7.6.5) + if (JsonLdConsts.LANGUAGE.equals(container) + || JsonLdConsts.INDEX.equals(container)) { + // 7.6.5.1) + Map mapObject; + if (result.containsKey(itemActiveProperty)) { + mapObject = (Map) result.get(itemActiveProperty); + } else { + mapObject = newMap(); + result.put(itemActiveProperty, mapObject); + } + + // 7.6.5.2) + if (JsonLdConsts.LANGUAGE.equals(container) && (compactedItem instanceof Map + && ((Map) compactedItem) + .containsKey(JsonLdConsts.VALUE))) { + compactedItem = ((Map) compactedItem) + .get(JsonLdConsts.VALUE); + } + + // 7.6.5.3) + final String mapKey = (String) ((Map) expandedItem) + .get(container); + // 7.6.5.4) + if (!mapObject.containsKey(mapKey)) { + mapObject.put(mapKey, compactedItem); + } else { + List tmp; + if (!(mapObject.get(mapKey) instanceof List)) { + tmp = new ArrayList(); + tmp.add(mapObject.put(mapKey, tmp)); + } else { + tmp = (List) mapObject.get(mapKey); + } + tmp.add(compactedItem); + } + } + // 7.6.6) + else { + // 7.6.6.1) + final Boolean check = (!compactArrays || JsonLdConsts.SET.equals(container) + || JsonLdConsts.LIST.equals(container) + || JsonLdConsts.LIST.equals(expandedProperty) + || JsonLdConsts.GRAPH.equals(expandedProperty)) + && (!(compactedItem instanceof List)); + if (check) { + final List tmp = new ArrayList(); + tmp.add(compactedItem); + compactedItem = tmp; + } + // 7.6.6.2) + if (!result.containsKey(itemActiveProperty)) { + result.put(itemActiveProperty, compactedItem); + } else { + if (!(result.get(itemActiveProperty) instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(result.put(itemActiveProperty, tmp)); + } + if (compactedItem instanceof List) { + ((List) result.get(itemActiveProperty)) + .addAll((List) compactedItem); + } else { + ((List) result.get(itemActiveProperty)).add(compactedItem); + } + } + + } + } + } + // 8) + return result; + } + + // 2) + return element; + } + + /** + * Compaction Algorithm + *
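+ * Illustrative call, assuming {@code api} is a JsonLdApi instance, {@code activeCtx}
+ * was parsed from the desired context, and {@code expandedElement} is the expanded
+ * JSON-LD to compact:
+ *
+ *   Object compacted = api.compact(activeCtx, null, expandedElement);
+ *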

+ * http://json-ld.org/spec/latest/json-ld-api/#compaction-algorithm + * + * @param activeCtx The Active Context + * @param activeProperty The Active Property + * @param element The current element + * @return The compacted JSON-LD object. + * @throws JsonLdError If there was an error during compaction. + */ + public Object compact(Context activeCtx, String activeProperty, Object element) + throws JsonLdError { + return compact(activeCtx, activeProperty, element, JsonLdOptions.DEFAULT_COMPACT_ARRAYS); + } + + /*** + * _____ _ _ _ _ _ _ | ____|_ ___ __ __ _ _ __ __| | / \ | | __ _ ___ _ + * __(_) |_| |__ _ __ ___ | _| \ \/ / '_ \ / _` | '_ \ / _` | / _ \ | |/ _` + * |/ _ \| '__| | __| '_ \| '_ ` _ \ | |___ > <| |_) | (_| | | | | (_| | / + * ___ \| | (_| | (_) | | | | |_| | | | | | | | | |_____/_/\_\ .__/ \__,_|_| + * |_|\__,_| /_/ \_\_|\__, |\___/|_| |_|\__|_| |_|_| |_| |_| |_| |___/ + */ + + /** + * Expansion Algorithm + *
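+ * For example (illustrative): with an active context that maps "name" to
+ * "http://xmlns.com/foaf/0.1/name", the key "name" expands to that IRI and the
+ * string "Bob" becomes the value object {"@value": "Bob"}.
+ *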

+ * http://json-ld.org/spec/latest/json-ld-api/#expansion-algorithm + * + * @param activeCtx The Active Context + * @param activeProperty The Active Property + * @param element The current element + * @return The expanded JSON-LD object. + * @throws JsonLdError If there was an error during expansion. + */ + public Object expand(Context activeCtx, String activeProperty, Object element) + throws JsonLdError { + // 1) + if (element == null) { + return null; + } + + // 3) + if (element instanceof List) { + // 3.1) + final List result = new ArrayList(); + // 3.2) + for (final Object item : (List) element) { + // 3.2.1) + final Object v = expand(activeCtx, activeProperty, item); + // 3.2.2) + if ((JsonLdConsts.LIST.equals(activeProperty) + || JsonLdConsts.LIST.equals(activeCtx.getContainer(activeProperty))) + && (v instanceof List || (v instanceof Map + && ((Map) v).containsKey(JsonLdConsts.LIST)))) { + throw new JsonLdError(Error.LIST_OF_LISTS, "lists of lists are not permitted."); + } + // 3.2.3) + else if (v != null) { + if (v instanceof List) { + result.addAll((Collection) v); + } else { + result.add(v); + } + } + } + // 3.3) + return result; + } + // 4) + else if (element instanceof Map) { + // access helper + final Map elem = (Map) element; + // 5) + if (elem.containsKey(JsonLdConsts.CONTEXT)) { + activeCtx = activeCtx.parse(elem.get(JsonLdConsts.CONTEXT)); + } + // 6) + Map result = newMap(); + // 7) + final List keys = new ArrayList(elem.keySet()); + Collections.sort(keys); + for (final String key : keys) { + final Object value = elem.get(key); + // 7.1) + if (key.equals(JsonLdConsts.CONTEXT)) { + continue; + } + // 7.2) + final String expandedProperty = activeCtx.expandIri(key, false, true, null, null); + Object expandedValue = null; + // 7.3) + if (expandedProperty == null + || (!expandedProperty.contains(":") && !isKeyword(expandedProperty))) { + continue; + } + // 7.4) + if (isKeyword(expandedProperty)) { + // 7.4.1) + if (JsonLdConsts.REVERSE.equals(activeProperty)) { + throw new JsonLdError(Error.INVALID_REVERSE_PROPERTY_MAP, + "a keyword cannot be used as a @reverse propery"); + } + // 7.4.2) + if (result.containsKey(expandedProperty)) { + throw new JsonLdError(Error.COLLIDING_KEYWORDS, + expandedProperty + " already exists in result"); + } + // 7.4.3) + if (JsonLdConsts.ID.equals(expandedProperty)) { + if (!(value instanceof String)) { + throw new JsonLdError(Error.INVALID_ID_VALUE, + "value of @id must be a string"); + } + expandedValue = activeCtx.expandIri((String) value, true, false, null, + null); + } + // 7.4.4) + else if (JsonLdConsts.TYPE.equals(expandedProperty)) { + if (value instanceof List) { + expandedValue = new ArrayList(); + for (final Object v : (List) value) { + if (!(v instanceof String)) { + throw new JsonLdError(Error.INVALID_TYPE_VALUE, + "@type value must be a string or array of strings"); + } + ((List) expandedValue).add( + activeCtx.expandIri((String) v, true, true, null, null)); + } + } else if (value instanceof String) { + expandedValue = activeCtx.expandIri((String) value, true, true, null, + null); + } + // TODO: SPEC: no mention of empty map check + else if (value instanceof Map) { + if (((Map) value).size() != 0) { + throw new JsonLdError(Error.INVALID_TYPE_VALUE, + "@type value must be a an empty object for framing"); + } + expandedValue = value; + } else { + throw new JsonLdError(Error.INVALID_TYPE_VALUE, + "@type value must be a string or array of strings"); + } + } + // 7.4.5) + else if (JsonLdConsts.GRAPH.equals(expandedProperty)) { + 
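+ // e.g. (illustrative) for {"@graph": [ ... ]} the array is expanded recursively
+ // with @graph as the active property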
expandedValue = expand(activeCtx, JsonLdConsts.GRAPH, value); + } + // 7.4.6) + else if (JsonLdConsts.VALUE.equals(expandedProperty)) { + if (value != null && (value instanceof Map || value instanceof List)) { + throw new JsonLdError(Error.INVALID_VALUE_OBJECT_VALUE, + "value of " + expandedProperty + " must be a scalar or null"); + } + expandedValue = value; + if (expandedValue == null) { + result.put(JsonLdConsts.VALUE, null); + continue; + } + } + // 7.4.7) + else if (JsonLdConsts.LANGUAGE.equals(expandedProperty)) { + if (!(value instanceof String)) { + throw new JsonLdError(Error.INVALID_LANGUAGE_TAGGED_STRING, + "Value of " + expandedProperty + " must be a string"); + } + expandedValue = ((String) value).toLowerCase(); + } + // 7.4.8) + else if (JsonLdConsts.INDEX.equals(expandedProperty)) { + if (!(value instanceof String)) { + throw new JsonLdError(Error.INVALID_INDEX_VALUE, + "Value of " + expandedProperty + " must be a string"); + } + expandedValue = value; + } + // 7.4.9) + else if (JsonLdConsts.LIST.equals(expandedProperty)) { + // 7.4.9.1) + if (activeProperty == null || JsonLdConsts.GRAPH.equals(activeProperty)) { + continue; + } + // 7.4.9.2) + expandedValue = expand(activeCtx, activeProperty, value); + + // NOTE: step not in the spec yet + if (!(expandedValue instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(expandedValue); + expandedValue = tmp; + } + + // 7.4.9.3) + for (final Object o : (List) expandedValue) { + if (o instanceof Map + && ((Map) o).containsKey(JsonLdConsts.LIST)) { + throw new JsonLdError(Error.LIST_OF_LISTS, + "A list may not contain another list"); + } + } + } + // 7.4.10) + else if (JsonLdConsts.SET.equals(expandedProperty)) { + expandedValue = expand(activeCtx, activeProperty, value); + } + // 7.4.11) + else if (JsonLdConsts.REVERSE.equals(expandedProperty)) { + if (!(value instanceof Map)) { + throw new JsonLdError(Error.INVALID_REVERSE_VALUE, + "@reverse value must be an object"); + } + // 7.4.11.1) + expandedValue = expand(activeCtx, JsonLdConsts.REVERSE, value); + // NOTE: algorithm assumes the result is a map + // 7.4.11.2) + if (((Map) expandedValue) + .containsKey(JsonLdConsts.REVERSE)) { + final Map reverse = (Map) ((Map) expandedValue) + .get(JsonLdConsts.REVERSE); + for (final String property : reverse.keySet()) { + final Object item = reverse.get(property); + // 7.4.11.2.1) + if (!result.containsKey(property)) { + result.put(property, new ArrayList()); + } + // 7.4.11.2.2) + if (item instanceof List) { + ((List) result.get(property)) + .addAll((List) item); + } else { + ((List) result.get(property)).add(item); + } + } + } + // 7.4.11.3) + if (((Map) expandedValue) + .size() > (((Map) expandedValue) + .containsKey(JsonLdConsts.REVERSE) ? 
1 : 0)) { + // 7.4.11.3.1) + if (!result.containsKey(JsonLdConsts.REVERSE)) { + result.put(JsonLdConsts.REVERSE, newMap()); + } + // 7.4.11.3.2) + final Map reverseMap = (Map) result + .get(JsonLdConsts.REVERSE); + // 7.4.11.3.3) + for (final String property : ((Map) expandedValue) + .keySet()) { + if (JsonLdConsts.REVERSE.equals(property)) { + continue; + } + // 7.4.11.3.3.1) + final List items = (List) ((Map) expandedValue) + .get(property); + for (final Object item : items) { + // 7.4.11.3.3.1.1) + if (item instanceof Map && (((Map) item) + .containsKey(JsonLdConsts.VALUE) + || ((Map) item) + .containsKey(JsonLdConsts.LIST))) { + throw new JsonLdError(Error.INVALID_REVERSE_PROPERTY_VALUE); + } + // 7.4.11.3.3.1.2) + if (!reverseMap.containsKey(property)) { + reverseMap.put(property, new ArrayList()); + } + // 7.4.11.3.3.1.3) + ((List) reverseMap.get(property)).add(item); + } + } + } + // 7.4.11.4) + continue; + } + // TODO: SPEC no mention of @explicit etc in spec + else if (JsonLdConsts.EXPLICIT.equals(expandedProperty) + || JsonLdConsts.DEFAULT.equals(expandedProperty) + || JsonLdConsts.EMBED.equals(expandedProperty) + || JsonLdConsts.EMBED_CHILDREN.equals(expandedProperty) + || JsonLdConsts.OMIT_DEFAULT.equals(expandedProperty)) { + expandedValue = expand(activeCtx, expandedProperty, value); + } + // 7.4.12) + if (expandedValue != null) { + result.put(expandedProperty, expandedValue); + } + // 7.4.13) + continue; + } + // 7.5 + else if (JsonLdConsts.LANGUAGE.equals(activeCtx.getContainer(key)) + && value instanceof Map) { + // 7.5.1) + expandedValue = new ArrayList(); + // 7.5.2) + for (final String language : ((Map) value).keySet()) { + Object languageValue = ((Map) value).get(language); + // 7.5.2.1) + if (!(languageValue instanceof List)) { + final Object tmp = languageValue; + languageValue = new ArrayList(); + ((List) languageValue).add(tmp); + } + // 7.5.2.2) + for (final Object item : (List) languageValue) { + // 7.5.2.2.1) + if (!(item instanceof String)) { + throw new JsonLdError(Error.INVALID_LANGUAGE_MAP_VALUE, + "Expected " + item.toString() + " to be a string"); + } + // 7.5.2.2.2) + final Map tmp = newMap(); + tmp.put(JsonLdConsts.VALUE, item); + tmp.put(JsonLdConsts.LANGUAGE, language.toLowerCase()); + ((List) expandedValue).add(tmp); + } + } + } + // 7.6) + else if (JsonLdConsts.INDEX.equals(activeCtx.getContainer(key)) + && value instanceof Map) { + // 7.6.1) + expandedValue = new ArrayList(); + // 7.6.2) + final List indexKeys = new ArrayList( + ((Map) value).keySet()); + Collections.sort(indexKeys); + for (final String index : indexKeys) { + Object indexValue = ((Map) value).get(index); + // 7.6.2.1) + if (!(indexValue instanceof List)) { + final Object tmp = indexValue; + indexValue = new ArrayList(); + ((List) indexValue).add(tmp); + } + // 7.6.2.2) + indexValue = expand(activeCtx, key, indexValue); + // 7.6.2.3) + for (final Map item : (List>) indexValue) { + // 7.6.2.3.1) + if (!item.containsKey(JsonLdConsts.INDEX)) { + item.put(JsonLdConsts.INDEX, index); + } + // 7.6.2.3.2) + ((List) expandedValue).add(item); + } + } + } + // 7.7) + else { + expandedValue = expand(activeCtx, key, value); + } + // 7.8) + if (expandedValue == null) { + continue; + } + // 7.9) + if (JsonLdConsts.LIST.equals(activeCtx.getContainer(key))) { + if (!(expandedValue instanceof Map) || !((Map) expandedValue) + .containsKey(JsonLdConsts.LIST)) { + Object tmp = expandedValue; + if (!(tmp instanceof List)) { + tmp = new ArrayList(); + ((List) tmp).add(expandedValue); + } + expandedValue = 
newMap(); + ((Map) expandedValue).put(JsonLdConsts.LIST, tmp); + } + } + // 7.10) + if (activeCtx.isReverseProperty(key)) { + // 7.10.1) + if (!result.containsKey(JsonLdConsts.REVERSE)) { + result.put(JsonLdConsts.REVERSE, newMap()); + } + // 7.10.2) + final Map reverseMap = (Map) result + .get(JsonLdConsts.REVERSE); + // 7.10.3) + if (!(expandedValue instanceof List)) { + final Object tmp = expandedValue; + expandedValue = new ArrayList(); + ((List) expandedValue).add(tmp); + } + // 7.10.4) + for (final Object item : (List) expandedValue) { + // 7.10.4.1) + if (item instanceof Map && (((Map) item) + .containsKey(JsonLdConsts.VALUE) + || ((Map) item).containsKey(JsonLdConsts.LIST))) { + throw new JsonLdError(Error.INVALID_REVERSE_PROPERTY_VALUE); + } + // 7.10.4.2) + if (!reverseMap.containsKey(expandedProperty)) { + reverseMap.put(expandedProperty, new ArrayList()); + } + // 7.10.4.3) + if (item instanceof List) { + ((List) reverseMap.get(expandedProperty)) + .addAll((List) item); + } else { + ((List) reverseMap.get(expandedProperty)).add(item); + } + } + } + // 7.11) + else { + // 7.11.1) + if (!result.containsKey(expandedProperty)) { + result.put(expandedProperty, new ArrayList()); + } + // 7.11.2) + if (expandedValue instanceof List) { + ((List) result.get(expandedProperty)) + .addAll((List) expandedValue); + } else { + ((List) result.get(expandedProperty)).add(expandedValue); + } + } + } + // 8) + if (result.containsKey(JsonLdConsts.VALUE)) { + // 8.1) + // TODO: is this method faster than just using containsKey for + // each? + final Set keySet = new HashSet(result.keySet()); + keySet.remove(JsonLdConsts.VALUE); + keySet.remove(JsonLdConsts.INDEX); + final boolean langremoved = keySet.remove(JsonLdConsts.LANGUAGE); + final boolean typeremoved = keySet.remove(JsonLdConsts.TYPE); + if ((langremoved && typeremoved) || !keySet.isEmpty()) { + throw new JsonLdError(Error.INVALID_VALUE_OBJECT, + "value object has unknown keys"); + } + // 8.2) + final Object rval = result.get(JsonLdConsts.VALUE); + if (rval == null) { + // nothing else is possible with result if we set it to + // null, so simply return it + return null; + } + // 8.3) + if (!(rval instanceof String) && result.containsKey(JsonLdConsts.LANGUAGE)) { + throw new JsonLdError(Error.INVALID_LANGUAGE_TAGGED_VALUE, + "when @language is used, @value must be a string"); + } + // 8.4) + else if (result.containsKey(JsonLdConsts.TYPE)) { + // TODO: is this enough for "is an IRI" + if (!(result.get(JsonLdConsts.TYPE) instanceof String) + || ((String) result.get(JsonLdConsts.TYPE)).startsWith("_:") + || !((String) result.get(JsonLdConsts.TYPE)).contains(":")) { + throw new JsonLdError(Error.INVALID_TYPED_VALUE, + "value of @type must be an IRI"); + } + } + } + // 9) + else if (result.containsKey(JsonLdConsts.TYPE)) { + final Object rtype = result.get(JsonLdConsts.TYPE); + if (!(rtype instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(rtype); + result.put(JsonLdConsts.TYPE, tmp); + } + } + // 10) + else if (result.containsKey(JsonLdConsts.SET) + || result.containsKey(JsonLdConsts.LIST)) { + // 10.1) + if (result.size() > (result.containsKey(JsonLdConsts.INDEX) ? 
2 : 1)) { + throw new JsonLdError(Error.INVALID_SET_OR_LIST_OBJECT, + "@set or @list may only contain @index"); + } + // 10.2) + if (result.containsKey(JsonLdConsts.SET)) { + // result becomes an array here, thus the remaining checks + // will never be true from here on + // so simply return the value rather than have to make + // result an object and cast it with every + // other use in the function. + return result.get(JsonLdConsts.SET); + } + } + // 11) + if (result.containsKey(JsonLdConsts.LANGUAGE) && result.size() == 1) { + result = null; + } + // 12) + if (activeProperty == null || JsonLdConsts.GRAPH.equals(activeProperty)) { + // 12.1) + if (result != null && (result.size() == 0 || result.containsKey(JsonLdConsts.VALUE) + || result.containsKey(JsonLdConsts.LIST))) { + result = null; + } + // 12.2) + else if (result != null && result.containsKey(JsonLdConsts.ID) + && result.size() == 1) { + result = null; + } + } + // 13) + return result; + } + // 2) If element is a scalar + else { + // 2.1) + if (activeProperty == null || JsonLdConsts.GRAPH.equals(activeProperty)) { + return null; + } + return activeCtx.expandValue(activeProperty, element); + } + } + + /** + * Expansion Algorithm + *
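+ * Illustrative call, assuming {@code api} is a JsonLdApi instance and {@code input}
+ * is the parsed JSON-LD object to expand:
+ *
+ *   Object expanded = api.expand(activeCtx, input);
+ *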

+ * http://json-ld.org/spec/latest/json-ld-api/#expansion-algorithm + * + * @param activeCtx The Active Context + * @param element The current element + * @return The expanded JSON-LD object. + * @throws JsonLdError If there was an error during expansion. + */ + public Object expand(Context activeCtx, Object element) throws JsonLdError { + return expand(activeCtx, null, element); + } + + /*** + * _____ _ _ _ _ _ _ _ _ | ___| | __ _| |_| |_ ___ _ __ / \ | | __ _ ___ _ + * __(_) |_| |__ _ __ ___ | |_ | |/ _` | __| __/ _ \ '_ \ / _ \ | |/ _` |/ _ + * \| '__| | __| '_ \| '_ ` _ \ | _| | | (_| | |_| || __/ | | | / ___ \| | + * (_| | (_) | | | | |_| | | | | | | | | |_| |_|\__,_|\__|\__\___|_| |_| /_/ + * \_\_|\__, |\___/|_| |_|\__|_| |_|_| |_| |_| |___/ + */ + + void generateNodeMap(Object element, Map nodeMap) throws JsonLdError { + generateNodeMap(element, nodeMap, JsonLdConsts.DEFAULT, null, null, null); + } + + void generateNodeMap(Object element, Map nodeMap, String activeGraph) + throws JsonLdError { + generateNodeMap(element, nodeMap, activeGraph, null, null, null); + } + + void generateNodeMap(Object element, Map nodeMap, String activeGraph, + Object activeSubject, String activeProperty, Map list) + throws JsonLdError { + // 1) + if (element instanceof List) { + // 1.1) + for (final Object item : (List) element) { + generateNodeMap(item, nodeMap, activeGraph, activeSubject, activeProperty, list); + } + return; + } + + // for convenience + final Map elem = (Map) element; + + // 2) + if (!nodeMap.containsKey(activeGraph)) { + nodeMap.put(activeGraph, newMap()); + } + final Map graph = (Map) nodeMap.get(activeGraph); + Map node = (Map) (activeSubject == null ? null + : graph.get(activeSubject)); + + // 3) + if (elem.containsKey(JsonLdConsts.TYPE)) { + // 3.1) + List oldTypes; + final List newTypes = new ArrayList(); + if (elem.get(JsonLdConsts.TYPE) instanceof List) { + oldTypes = (List) elem.get(JsonLdConsts.TYPE); + } else { + oldTypes = new ArrayList(4); + oldTypes.add((String) elem.get(JsonLdConsts.TYPE)); + } + for (final String item : oldTypes) { + if (item.startsWith("_:")) { + newTypes.add(generateBlankNodeIdentifier(item)); + } else { + newTypes.add(item); + } + } + if (elem.get(JsonLdConsts.TYPE) instanceof List) { + elem.put(JsonLdConsts.TYPE, newTypes); + } else { + elem.put(JsonLdConsts.TYPE, newTypes.get(0)); + } + } + + // 4) + if (elem.containsKey(JsonLdConsts.VALUE)) { + // 4.1) + if (list == null) { + JsonLdUtils.mergeValue(node, activeProperty, elem); + } + // 4.2) + else { + JsonLdUtils.mergeValue(list, JsonLdConsts.LIST, elem); + } + } + + // 5) + else if (elem.containsKey(JsonLdConsts.LIST)) { + // 5.1) + final Map result = newMap(JsonLdConsts.LIST, new ArrayList(4)); + // 5.2) + // for (final Object item : (List) elem.get("@list")) { + // generateNodeMap(item, nodeMap, activeGraph, activeSubject, + // activeProperty, result); + // } + generateNodeMap(elem.get(JsonLdConsts.LIST), nodeMap, activeGraph, activeSubject, + activeProperty, result); + // 5.3) + JsonLdUtils.mergeValue(node, activeProperty, result); + } + + // 6) + else { + // 6.1) + String id = (String) elem.remove(JsonLdConsts.ID); + if (id != null) { + if (id.startsWith("_:")) { + id = generateBlankNodeIdentifier(id); + } + } + // 6.2) + else { + id = generateBlankNodeIdentifier(null); + } + // 6.3) + if (!graph.containsKey(id)) { + final Map tmp = newMap(JsonLdConsts.ID, id); + graph.put(id, tmp); + } + // 6.4) TODO: SPEC this line is asked for by the spec, but it breaks + // various tests + // node = (Map) 
graph.get(id); + // 6.5) + if (activeSubject instanceof Map) { + // 6.5.1) + JsonLdUtils.mergeValue((Map) graph.get(id), activeProperty, + activeSubject); + } + // 6.6) + else if (activeProperty != null) { + final Map reference = newMap(JsonLdConsts.ID, id); + // 6.6.2) + if (list == null) { + // 6.6.2.1+2) + JsonLdUtils.mergeValue(node, activeProperty, reference); + } + // 6.6.3) TODO: SPEC says to add ELEMENT to @list member, should + // be REFERENCE + else { + JsonLdUtils.mergeValue(list, JsonLdConsts.LIST, reference); + } + } + // TODO: SPEC this is removed in the spec now, but it's still needed + // (see 6.4) + node = (Map) graph.get(id); + // 6.7) + if (elem.containsKey(JsonLdConsts.TYPE)) { + for (final Object type : (List) elem.remove(JsonLdConsts.TYPE)) { + JsonLdUtils.mergeValue(node, JsonLdConsts.TYPE, type); + } + } + // 6.8) + if (elem.containsKey(JsonLdConsts.INDEX)) { + final Object elemIndex = elem.remove(JsonLdConsts.INDEX); + if (node.containsKey(JsonLdConsts.INDEX)) { + if (!JsonLdUtils.deepCompare(node.get(JsonLdConsts.INDEX), elemIndex)) { + throw new JsonLdError(Error.CONFLICTING_INDEXES); + } + } else { + node.put(JsonLdConsts.INDEX, elemIndex); + } + } + // 6.9) + if (elem.containsKey(JsonLdConsts.REVERSE)) { + // 6.9.1) + final Map referencedNode = newMap(JsonLdConsts.ID, id); + // 6.9.2+6.9.4) + final Map reverseMap = (Map) elem + .remove(JsonLdConsts.REVERSE); + // 6.9.3) + for (final String property : reverseMap.keySet()) { + final List values = (List) reverseMap.get(property); + // 6.9.3.1) + for (final Object value : values) { + // 6.9.3.1.1) + generateNodeMap(value, nodeMap, activeGraph, referencedNode, property, + null); + } + } + } + // 6.10) + if (elem.containsKey(JsonLdConsts.GRAPH)) { + generateNodeMap(elem.remove(JsonLdConsts.GRAPH), nodeMap, id, null, null, null); + } + // 6.11) + final List keys = new ArrayList(elem.keySet()); + Collections.sort(keys); + for (String property : keys) { + final Object value = elem.get(property); + // 6.11.1) + if (property.startsWith("_:")) { + property = generateBlankNodeIdentifier(property); + } + // 6.11.2) + if (!node.containsKey(property)) { + node.put(property, new ArrayList(4)); + } + // 6.11.3) + generateNodeMap(value, nodeMap, activeGraph, id, property, null); + } + } + } + + /** + * Blank Node identifier map specified in: + *

+ * http://www.w3.org/TR/json-ld-api/#generate-blank-node-identifier + */ + private final Map blankNodeIdentifierMap = new LinkedHashMap(); + + /** + * Counter specified in: + *

+ * http://www.w3.org/TR/json-ld-api/#generate-blank-node-identifier + */ + private int blankNodeCounter = 0; + + /** + * Generates a blank node identifier for the given key using the algorithm + * specified in: + *

+ * http://www.w3.org/TR/json-ld-api/#generate-blank-node-identifier + * + * @param id The id, or null to generate a fresh, unused, blank node + * identifier. + * @return A blank node identifier based on id if it was not null, or a + * fresh, unused, blank node identifier if it was null. + */ + String generateBlankNodeIdentifier(String id) { + if (id != null && blankNodeIdentifierMap.containsKey(id)) { + return blankNodeIdentifierMap.get(id); + } + final String bnid = "_:b" + blankNodeCounter++; + if (id != null) { + blankNodeIdentifierMap.put(id, bnid); + } + return bnid; + } + + /** + * Generates a fresh, unused, blank node identifier using the algorithm + * specified in: + *

+ * http://www.w3.org/TR/json-ld-api/#generate-blank-node-identifier + * + * @return A fresh, unused, blank node identifier. + */ + String generateBlankNodeIdentifier() { + return generateBlankNodeIdentifier(null); + } + + /*** + * _____ _ _ _ _ _ _ | ___| __ __ _ _ __ ___ (_)_ __ __ _ / \ | | __ _ ___ _ + * __(_) |_| |__ _ __ ___ | |_ | '__/ _` | '_ ` _ \| | '_ \ / _` | / _ \ | + * |/ _` |/ _ \| '__| | __| '_ \| '_ ` _ \ | _|| | | (_| | | | | | | | | | | + * (_| | / ___ \| | (_| | (_) | | | | |_| | | | | | | | | |_| |_| \__,_|_| + * |_| |_|_|_| |_|\__, | /_/ \_\_|\__, |\___/|_| |_|\__|_| |_|_| |_| |_| + * |___/ |___/ + */ + + private class FramingContext { + public boolean embed; + public boolean explicit; + public boolean omitDefault; + + public FramingContext() { + embed = true; + explicit = false; + omitDefault = false; + embeds = null; + } + + public FramingContext(JsonLdOptions opts) { + this(); + if (opts.getEmbed() != null) { + this.embed = opts.getEmbed(); + } + if (opts.getExplicit() != null) { + this.explicit = opts.getExplicit(); + } + if (opts.getOmitDefault() != null) { + this.omitDefault = opts.getOmitDefault(); + } + } + + public Map embeds = null; + } + + private class EmbedNode { + public Object parent = null; + public String property = null; + } + + private Map nodeMap; + + /** + * Performs JSON-LD + * framing. + * + * @param input the expanded JSON-LD to frame. + * @param frame the expanded JSON-LD frame to use. + * @return the framed output. + * @throws JsonLdError If the framing was not successful. + */ + public List frame(Object input, List frame) throws JsonLdError { + // create framing state + final FramingContext state = new FramingContext(this.opts); + + // use tree map so keys are sotred by default + final Map nodes = new TreeMap(); + generateNodeMap(input, nodes); + this.nodeMap = (Map) nodes.get(JsonLdConsts.DEFAULT); + + final List framed = new ArrayList(); + // NOTE: frame validation is done by the function not allowing anything + // other than list to me passed + frame(state, this.nodeMap, + (frame != null && frame.size() > 0 ? (Map) frame.get(0) : newMap()), + framed, null); + + return framed; + } + + /** + * Frames subjects according to the given frame. + * + * @param state the current framing state. + * @param frame the frame. + * @param parent the parent subject or top-level array. + * @param property the parent property, initialized to null. + * @throws JsonLdError If there was an error during framing. 
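+ * Illustrative driver (the public frame(Object, List) method above builds the node
+ * map and then invokes this method):
+ *
+ *   List framed = api.frame(expandedInput, expandedFrame);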
+ */ + private void frame(FramingContext state, Map nodes, Map frame, + Object parent, String property) throws JsonLdError { + + // filter out subjects that match the frame + final Map matches = filterNodes(state, nodes, frame); + + // get flags for current frame + Boolean embedOn = getFrameFlag(frame, JsonLdConsts.EMBED, state.embed); + final Boolean explicicOn = getFrameFlag(frame, JsonLdConsts.EXPLICIT, state.explicit); + + // add matches to output + final List ids = new ArrayList(matches.keySet()); + Collections.sort(ids); + for (final String id : ids) { + if (property == null) { + state.embeds = new LinkedHashMap(); + } + + // start output + final Map output = newMap(); + output.put(JsonLdConsts.ID, id); + + // prepare embed meta info + final EmbedNode embeddedNode = new EmbedNode(); + embeddedNode.parent = parent; + embeddedNode.property = property; + + // if embed is on and there is an existing embed + if (embedOn && state.embeds.containsKey(id)) { + final EmbedNode existing = state.embeds.get(id); + embedOn = false; + + if (existing.parent instanceof List) { + for (final Object p : (List) existing.parent) { + if (JsonLdUtils.compareValues(output, p)) { + embedOn = true; + break; + } + } + } + // existing embed's parent is an object + else { + if (((Map) existing.parent).containsKey(existing.property)) { + for (final Object v : (List) ((Map) existing.parent) + .get(existing.property)) { + if (v instanceof Map && Obj.equals(id, + ((Map) v).get(JsonLdConsts.ID))) { + embedOn = true; + break; + } + } + } + } + + // existing embed has already been added, so allow an overwrite + if (embedOn) { + removeEmbed(state, id); + } + } + + // not embedding, add output without any other properties + if (!embedOn) { + addFrameOutput(state, parent, property, output); + } else { + // add embed meta info + state.embeds.put(id, embeddedNode); + + // iterate over subject properties + final Map element = (Map) matches.get(id); + List props = new ArrayList(element.keySet()); + Collections.sort(props); + for (final String prop : props) { + + // copy keywords to output + if (isKeyword(prop)) { + output.put(prop, JsonLdUtils.clone(element.get(prop))); + continue; + } + + // if property isn't in the frame + if (!frame.containsKey(prop)) { + // if explicit is off, embed values + if (!explicicOn) { + embedValues(state, element, prop, output); + } + continue; + } + + // add objects + final List value = (List) element.get(prop); + + for (final Object item : value) { + + // recurse into list + if ((item instanceof Map) + && ((Map) item).containsKey(JsonLdConsts.LIST)) { + // add empty list + final Map list = newMap(); + list.put(JsonLdConsts.LIST, new ArrayList()); + addFrameOutput(state, output, prop, list); + + // add list objects + for (final Object listitem : (List) ((Map) item) + .get(JsonLdConsts.LIST)) { + // recurse into subject reference + if (JsonLdUtils.isNodeReference(listitem)) { + final Map tmp = newMap(); + final String itemid = (String) ((Map) listitem) + .get(JsonLdConsts.ID); + // TODO: nodes may need to be node_map, + // which is global + tmp.put(itemid, this.nodeMap.get(itemid)); + frame(state, tmp, + (Map) ((List) frame.get(prop)) + .get(0), + list, JsonLdConsts.LIST); + } else { + // include other values automatcially (TODO: + // may need JsonLdUtils.clone(n)) + addFrameOutput(state, list, JsonLdConsts.LIST, listitem); + } + } + } + + // recurse into subject reference + else if (JsonLdUtils.isNodeReference(item)) { + final Map tmp = newMap(); + final String itemid = (String) ((Map) item) + 
.get(JsonLdConsts.ID); + // TODO: nodes may need to be node_map, which is + // global + tmp.put(itemid, this.nodeMap.get(itemid)); + frame(state, tmp, + (Map) ((List) frame.get(prop)).get(0), + output, prop); + } else { + // include other values automatically (TODO: may + // need JsonLdUtils.clone(o)) + addFrameOutput(state, output, prop, item); + } + } + } + + // handle defaults + props = new ArrayList(frame.keySet()); + Collections.sort(props); + for (final String prop : props) { + // skip keywords + if (isKeyword(prop)) { + continue; + } + + final List pf = (List) frame.get(prop); + Map propertyFrame = pf.size() > 0 + ? (Map) pf.get(0) : null; + if (propertyFrame == null) { + propertyFrame = newMap(); + } + final boolean omitDefaultOn = getFrameFlag(propertyFrame, + JsonLdConsts.OMIT_DEFAULT, state.omitDefault); + if (!omitDefaultOn && !output.containsKey(prop)) { + Object def = "@null"; + if (propertyFrame.containsKey(JsonLdConsts.DEFAULT)) { + def = JsonLdUtils.clone(propertyFrame.get(JsonLdConsts.DEFAULT)); + } + if (!(def instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(def); + def = tmp; + } + final Map tmp1 = newMap(JsonLdConsts.PRESERVE, def); + final List tmp2 = new ArrayList(); + tmp2.add(tmp1); + output.put(prop, tmp2); + } + } + + // add output to parent + addFrameOutput(state, parent, property, output); + } + } + } + + private Boolean getFrameFlag(Map frame, String name, boolean thedefault) { + Object value = frame.get(name); + if (value instanceof List) { + if (((List) value).size() > 0) { + value = ((List) value).get(0); + } + } + if (value instanceof Map && ((Map) value).containsKey(JsonLdConsts.VALUE)) { + value = ((Map) value).get(JsonLdConsts.VALUE); + } + if (value instanceof Boolean) { + return (Boolean) value; + } + return thedefault; + } + + /** + * Removes an existing embed. + * + * @param state the current framing state. + * @param id the @id of the embed to remove. 
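+ * For example (illustrative): removing the embed for "http://example.com/a" replaces
+ * the embedded node in its parent with the reference {"@id": "http://example.com/a"}.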
+ */ + private static void removeEmbed(FramingContext state, String id) { + // get existing embed + final Map embeds = state.embeds; + final EmbedNode embed = embeds.get(id); + final Object parent = embed.parent; + final String property = embed.property; + + // create reference to replace embed + final Map node = newMap(JsonLdConsts.ID, id); + + // remove existing embed + if (JsonLdUtils.isNode(parent)) { + // replace subject with reference + final List newvals = new ArrayList(); + final List oldvals = (List) ((Map) parent) + .get(property); + for (final Object v : oldvals) { + if (v instanceof Map + && Obj.equals(((Map) v).get(JsonLdConsts.ID), id)) { + newvals.add(node); + } else { + newvals.add(v); + } + } + ((Map) parent).put(property, newvals); + } + // recursively remove dependent dangling embeds + removeDependents(embeds, id); + } + + private static void removeDependents(Map embeds, String id) { + // get embed keys as a separate array to enable deleting keys in map + for (final String id_dep : new HashSet(embeds.keySet())) { + final EmbedNode e = embeds.get(id_dep); + if (e == null || e.parent == null || !(e.parent instanceof Map)) { + continue; + } + final String pid = (String) ((Map) e.parent).get(JsonLdConsts.ID); + if (Obj.equals(id, pid)) { + embeds.remove(id_dep); + removeDependents(embeds, id_dep); + } + } + } + + private Map filterNodes(FramingContext state, Map nodes, + Map frame) throws JsonLdError { + final Map rval = newMap(); + for (final String id : nodes.keySet()) { + final Map element = (Map) nodes.get(id); + if (element != null && filterNode(state, element, frame)) { + rval.put(id, element); + } + } + return rval; + } + + private boolean filterNode(FramingContext state, Map node, + Map frame) throws JsonLdError { + final Object types = frame.get(JsonLdConsts.TYPE); + if (types != null) { + if (!(types instanceof List)) { + throw new JsonLdError(Error.SYNTAX_ERROR, "frame @type must be an array"); + } + Object nodeTypes = node.get(JsonLdConsts.TYPE); + if (nodeTypes == null) { + nodeTypes = new ArrayList(); + } else if (!(nodeTypes instanceof List)) { + throw new JsonLdError(Error.SYNTAX_ERROR, "node @type must be an array"); + } + if (((List) types).size() == 1 && ((List) types).get(0) instanceof Map + && ((Map) ((List) types).get(0)).size() == 0) { + return !((List) nodeTypes).isEmpty(); + } else { + for (final Object i : (List) nodeTypes) { + for (final Object j : (List) types) { + if (JsonLdUtils.deepCompare(i, j)) { + return true; + } + } + } + return false; + } + } else { + for (final String key : frame.keySet()) { + if (JsonLdConsts.ID.equals(key) || !isKeyword(key) && !(node.containsKey(key))) { + + final Object frameObject = frame.get(key); + if (frameObject instanceof ArrayList) { + final ArrayList o = (ArrayList) frame.get(key); + + boolean _default = false; + for (final Object oo : o) { + if (oo instanceof Map) { + if (((Map) oo).containsKey(JsonLdConsts.DEFAULT)) { + _default = true; + } + } + } + if (_default) { + continue; + } + } + + return false; + } + } + return true; + } + } + + /** + * Adds framing output to the given parent. + * + * @param state the current framing state. + * @param parent the parent to add to. + * @param property the parent property. + * @param output the output to add. 
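To make the matching rules in filterNode above concrete, the sketch below shows the frame shapes it distinguishes: an explicit @type list matches nodes carrying one of those types, a single empty object under @type acts as a wildcard for any typed node, and a frame without @type falls back to matching on the presence of its non-keyword properties (or on an @default entry). The IRIs are hypothetical and the method is package-internal, so this only illustrates the data it inspects:

    import java.util.*;

    public class FrameMatchSketch {
        public static void main(String[] args) {
            // matches nodes whose @type contains http://example.org/Book
            Map<String, Object> typedFrame = new LinkedHashMap<>();
            typedFrame.put("@type", Arrays.asList("http://example.org/Book"));

            // a lone empty object under @type is a wildcard: any node that
            // has some @type at all passes the filter
            Map<String, Object> wildcardFrame = new LinkedHashMap<>();
            wildcardFrame.put("@type", Arrays.asList(new LinkedHashMap<String, Object>()));

            System.out.println(typedFrame + "\n" + wildcardFrame);
        }
    }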
+ */ + private static void addFrameOutput(FramingContext state, Object parent, String property, + Object output) { + if (parent instanceof Map) { + List prop = (List) ((Map) parent).get(property); + if (prop == null) { + prop = new ArrayList(); + ((Map) parent).put(property, prop); + } + prop.add(output); + } else { + ((List) parent).add(output); + } + } + + /** + * Embeds values for the given subject and property into the given output + * during the framing algorithm. + * + * @param state the current framing state. + * @param element the subject. + * @param property the property. + * @param output the output. + */ + private void embedValues(FramingContext state, Map element, String property, + Object output) { + // embed subject properties in output + final List objects = (List) element.get(property); + for (Object o : objects) { + // handle subject reference + if (JsonLdUtils.isNodeReference(o)) { + final String sid = (String) ((Map) o).get(JsonLdConsts.ID); + + // embed full subject if isn't already embedded + if (!state.embeds.containsKey(sid)) { + // add embed + final EmbedNode embed = new EmbedNode(); + embed.parent = output; + embed.property = property; + state.embeds.put(sid, embed); + + // recurse into subject + o = newMap(); + Map s = (Map) this.nodeMap.get(sid); + if (s == null) { + s = newMap(JsonLdConsts.ID, sid); + } + for (final String prop : s.keySet()) { + // copy keywords + if (isKeyword(prop)) { + ((Map) o).put(prop, JsonLdUtils.clone(s.get(prop))); + continue; + } + embedValues(state, s, prop, o); + } + } + addFrameOutput(state, output, property, o); + } + // copy non-subject value + else { + addFrameOutput(state, output, property, JsonLdUtils.clone(o)); + } + } + } + + /*** + * ____ _ __ ____ ____ _____ _ _ _ _ _ / ___|___ _ ____ _____ _ __| |_ / _|_ + * __ ___ _ __ ___ | _ \| _ \| ___| / \ | | __ _ ___ _ __(_) |_| |__ _ __ + * ___ | | / _ \| '_ \ \ / / _ \ '__| __| | |_| '__/ _ \| '_ ` _ \ | |_) | | + * | | |_ / _ \ | |/ _` |/ _ \| '__| | __| '_ \| '_ ` _ \ | |__| (_) | | | \ + * V / __/ | | |_ | _| | | (_) | | | | | | | _ <| |_| | _| / ___ \| | (_| | + * (_) | | | | |_| | | | | | | | | \____\___/|_| |_|\_/ \___|_| \__| |_| |_| + * \___/|_| |_| |_| |_| \_\____/|_| /_/ \_\_|\__, |\___/|_| |_|\__|_| |_|_| + * |_| |_| |___/ + */ + + /** + * Helper class for node usages + * + * @author tristan + */ + private class UsagesNode { + public UsagesNode(NodeMapNode node, String property, Map value) { + this.node = node; + this.property = property; + this.value = value; + } + + public NodeMapNode node = null; + public String property = null; + public Map value = null; + } + + private class NodeMapNode extends LinkedHashMap { + public List usages = new ArrayList(4); + + public NodeMapNode(String id) { + super(); + this.put(JsonLdConsts.ID, id); + } + + // helper fucntion for 4.3.3 + public boolean isWellFormedListNode() { + if (usages.size() != 1) { + return false; + } + int keys = 0; + if (containsKey(RDF_FIRST)) { + keys++; + if (!(get(RDF_FIRST) instanceof List + && ((List) get(RDF_FIRST)).size() == 1)) { + return false; + } + } + if (containsKey(RDF_REST)) { + keys++; + if (!(get(RDF_REST) instanceof List + && ((List) get(RDF_REST)).size() == 1)) { + return false; + } + } + if (containsKey(JsonLdConsts.TYPE)) { + keys++; + if (!(get(JsonLdConsts.TYPE) instanceof List + && ((List) get(JsonLdConsts.TYPE)).size() == 1) + && RDF_LIST.equals(((List) get(JsonLdConsts.TYPE)).get(0))) { + return false; + } + } + // TODO: SPEC: 4.3.3 has no mention of @id + if 
(containsKey(JsonLdConsts.ID)) { + keys++; + } + if (keys < size()) { + return false; + } + return true; + } + + // return this node without the usages variable + public Map serialize() { + return new LinkedHashMap(this); + } + } + + /** + * Converts RDF statements into JSON-LD. + * + * @param dataset the RDF statements. + * @return A list of JSON-LD objects found in the given dataset. + * @throws JsonLdError If there was an error during conversion from RDF to JSON-LD. + */ + public List fromRDF(final RDFDataset dataset) throws JsonLdError { + return fromRDF(dataset, false); + } + + /** + * Converts RDF statements into JSON-LD, presuming that there are no + * duplicates in the dataset. + * + * @param dataset the RDF statements. + * @param noDuplicatesInDataset True if there are no duplicates in the dataset and false + * otherwise. + * @return A list of JSON-LD objects found in the given dataset. + * @throws JsonLdError If there was an error during conversion from RDF to JSON-LD. + * @deprecated Experimental method, only use if you are sure you need to use + * this method. Most users will need to use + * {@link #fromRDF(RDFDataset)}. + */ + @Deprecated + public List fromRDF(final RDFDataset dataset, boolean noDuplicatesInDataset) + throws JsonLdError { + // 1) + final Map defaultGraph = new LinkedHashMap(4); + // 2) + final Map> graphMap = new LinkedHashMap>( + 4); + graphMap.put(JsonLdConsts.DEFAULT, defaultGraph); + + // 3/3.1) + for (final String name : dataset.graphNames()) { + + final List graph = dataset.getQuads(name); + + // 3.2+3.4) + Map nodeMap; + if (!graphMap.containsKey(name)) { + nodeMap = new LinkedHashMap(); + graphMap.put(name, nodeMap); + } else { + nodeMap = graphMap.get(name); + } + + // 3.3) + if (!JsonLdConsts.DEFAULT.equals(name) && !Obj.contains(defaultGraph, name)) { + defaultGraph.put(name, new NodeMapNode(name)); + } + + // 3.5) + for (final RDFDataset.Quad triple : graph) { + final String subject = triple.getSubject().getValue(); + final String predicate = triple.getPredicate().getValue(); + final RDFDataset.Node object = triple.getObject(); + + // 3.5.1+3.5.2) + NodeMapNode node; + if (!nodeMap.containsKey(subject)) { + node = new NodeMapNode(subject); + nodeMap.put(subject, node); + } else { + node = nodeMap.get(subject); + } + + // 3.5.3) + if ((object.isIRI() || object.isBlankNode()) + && !nodeMap.containsKey(object.getValue())) { + nodeMap.put(object.getValue(), new NodeMapNode(object.getValue())); + } + + // 3.5.4) + if (RDF_TYPE.equals(predicate) && (object.isIRI() || object.isBlankNode()) + && !opts.getUseRdfType()) { + JsonLdUtils.mergeValue(node, JsonLdConsts.TYPE, object.getValue()); + continue; + } + + // 3.5.5) + final Map value = object.toObject(opts.getUseNativeTypes()); + + // 3.5.6+7) + if (noDuplicatesInDataset) { + JsonLdUtils.laxMergeValue(node, predicate, value); + } else { + JsonLdUtils.mergeValue(node, predicate, value); + } + + // 3.5.8) + if (object.isBlankNode() || object.isIRI()) { + // 3.5.8.1-3) + nodeMap.get(object.getValue()).usages + .add(new UsagesNode(node, predicate, value)); + } + } + } + + // 4) + for (final String name : graphMap.keySet()) { + final Map graph = graphMap.get(name); + + // 4.1) + if (!graph.containsKey(RDF_NIL)) { + continue; + } + + // 4.2) + final NodeMapNode nil = graph.get(RDF_NIL); + // 4.3) + for (final UsagesNode usage : nil.usages) { + // 4.3.1) + NodeMapNode node = usage.node; + String property = usage.property; + Map head = usage.value; + // 4.3.2) + final List list = new ArrayList(4); + final List 
listNodes = new ArrayList(4); + // 4.3.3) + while (RDF_REST.equals(property) && node.isWellFormedListNode()) { + // 4.3.3.1) + list.add(((List) node.get(RDF_FIRST)).get(0)); + // 4.3.3.2) + listNodes.add((String) node.get(JsonLdConsts.ID)); + // 4.3.3.3) + final UsagesNode nodeUsage = node.usages.get(0); + // 4.3.3.4) + node = nodeUsage.node; + property = nodeUsage.property; + head = nodeUsage.value; + // 4.3.3.5) + if (!JsonLdUtils.isBlankNode(node)) { + break; + } + } + // 4.3.4) + if (RDF_FIRST.equals(property)) { + // 4.3.4.1) + if (RDF_NIL.equals(node.get(JsonLdConsts.ID))) { + continue; + } + // 4.3.4.3) + final String headId = (String) head.get(JsonLdConsts.ID); + // 4.3.4.4-5) + head = (Map) ((List) graph.get(headId).get(RDF_REST)) + .get(0); + // 4.3.4.6) + list.remove(list.size() - 1); + listNodes.remove(listNodes.size() - 1); + } + // 4.3.5) + head.remove(JsonLdConsts.ID); + // 4.3.6) + Collections.reverse(list); + // 4.3.7) + head.put(JsonLdConsts.LIST, list); + // 4.3.8) + for (final String nodeId : listNodes) { + graph.remove(nodeId); + } + } + } + + // 5) + final List result = new ArrayList(4); + // 6) + final List ids = new ArrayList(defaultGraph.keySet()); + Collections.sort(ids); + for (final String subject : ids) { + final NodeMapNode node = defaultGraph.get(subject); + // 6.1) + if (graphMap.containsKey(subject)) { + // 6.1.1) + node.put(JsonLdConsts.GRAPH, new ArrayList(4)); + // 6.1.2) + final List keys = new ArrayList(graphMap.get(subject).keySet()); + Collections.sort(keys); + for (final String s : keys) { + final NodeMapNode n = graphMap.get(subject).get(s); + if (n.size() == 1 && n.containsKey(JsonLdConsts.ID)) { + continue; + } + ((List) node.get(JsonLdConsts.GRAPH)).add(n.serialize()); + } + } + // 6.2) + if (node.size() == 1 && node.containsKey(JsonLdConsts.ID)) { + continue; + } + result.add(node.serialize()); + } + + return result; + } + + /*** + * ____ _ _ ____ ____ _____ _ _ _ _ _ / ___|___ _ ____ _____ _ __| |_ | |_ + * ___ | _ \| _ \| ___| / \ | | __ _ ___ _ __(_) |_| |__ _ __ ___ | | / _ \| + * '_ \ \ / / _ \ '__| __| | __/ _ \ | |_) | | | | |_ / _ \ | |/ _` |/ _ \| + * '__| | __| '_ \| '_ ` _ \ | |__| (_) | | | \ V / __/ | | |_ | || (_) | | + * _ <| |_| | _| / ___ \| | (_| | (_) | | | | |_| | | | | | | | | + * \____\___/|_| |_|\_/ \___|_| \__| \__\___/ |_| \_\____/|_| /_/ \_\_|\__, + * |\___/|_| |_|\__|_| |_|_| |_| |_| |___/ + */ + + /** + * Adds RDF triples for each graph in the current node map to an RDF + * dataset. + * + * @return the RDF dataset. + * @throws JsonLdError If there was an error converting from JSON-LD to RDF. + */ + public RDFDataset toRDF() throws JsonLdError { + // TODO: make the default generateNodeMap call (i.e. 
without a + // graphName) create and return the nodeMap + final Map nodeMap = newMap(); + nodeMap.put(JsonLdConsts.DEFAULT, newMap()); + generateNodeMap(this.value, nodeMap); + + final RDFDataset dataset = new RDFDataset(this); + + for (final String graphName : nodeMap.keySet()) { + // 4.1) + if (JsonLdUtils.isRelativeIri(graphName)) { + continue; + } + final Map graph = (Map) nodeMap.get(graphName); + dataset.graphToRDF(graphName, graph); + } + + return dataset; + } + + /*** + * _ _ _ _ _ _ _ _ _ _ _ | \ | | ___ _ __ _ __ ___ __ _| (_)______ _| |_(_) + * ___ _ __ / \ | | __ _ ___ _ __(_) |_| |__ _ __ ___ | \| |/ _ \| '__| '_ ` + * _ \ / _` | | |_ / _` | __| |/ _ \| '_ \ / _ \ | |/ _` |/ _ \| '__| | __| + * '_ \| '_ ` _ \ | |\ | (_) | | | | | | | | (_| | | |/ / (_| | |_| | (_) | + * | | | / ___ \| | (_| | (_) | | | | |_| | | | | | | | | |_| \_|\___/|_| + * |_| |_| |_|\__,_|_|_/___\__,_|\__|_|\___/|_| |_| /_/ \_\_|\__, |\___/|_| + * |_|\__|_| |_|_| |_| |_| |___/ + */ + + /** + * Performs RDF normalization on the given JSON-LD input. + * + * @param dataset the expanded JSON-LD object to normalize. + * @return The normalized JSON-LD object + * @throws JsonLdError If there was an error while normalizing. + */ + public Object normalize(Map dataset, JsonLdOptions options) throws JsonLdError { + if (this.opts.getAlgorithm().equals(JsonLdOptions.URGNA2012)) { + return normalizeURGN2012(dataset); + } else if (this.opts.getAlgorithm().equals(JsonLdOptions.URDNA2015)) { + return normalizeURDNA2015(dataset, options); + } + + return null; + } + + public Object normalizeURGN2012(Map dataset) throws JsonLdError { + // create quads and map bnodes to their associated quads + final List quads = new ArrayList(); + final Map bnodes = newMap(); + for (String graphName : dataset.keySet()) { + final List> triples = (List>) dataset + .get(graphName); + if (JsonLdConsts.DEFAULT.equals(graphName)) { + graphName = null; + } + for (final Map quad : triples) { + if (graphName != null) { + if (graphName.indexOf("_:") == 0) { + final Map tmp = newMap(); + tmp.put("type", "blank node"); + tmp.put("value", graphName); + quad.put("name", tmp); + } else { + final Map tmp = newMap(); + tmp.put("type", "IRI"); + tmp.put("value", graphName); + quad.put("name", tmp); + } + } + quads.add(quad); + + final String[] attrs = new String[]{"subject", "object", "name"}; + for (final String attr : attrs) { + if (quad.containsKey(attr) && "blank node" + .equals(((Map) quad.get(attr)).get("type"))) { + final String id = (String) ((Map) quad.get(attr)) + .get("value"); + if (!bnodes.containsKey(id)) { + bnodes.put(id, new LinkedHashMap>() { + { + put("quads", new ArrayList()); + } + }); + } + ((List) ((Map) bnodes.get(id)).get("quads")) + .add(quad); + } + } + } + } + + // mapping complete, start canonical naming + final NormalizeUtils normalizeUtils = new NormalizeUtils(quads, bnodes, + new UniqueNamer("_:c14n"), opts); + return normalizeUtils.hashBlankNodes(bnodes.keySet()); + } + + public Object normalizeURDNA2015(Map dataset, JsonLdOptions options) throws JsonLdError { + return new Urdna2015(dataset, options).normalize(); + } + +} \ No newline at end of file diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdConsts.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdConsts.java new file mode 100644 index 0000000..9de5b76 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdConsts.java @@ -0,0 +1,65 @@ +package com.github.jsonldjava.core; + +/** + * URI Constants used in the 
JSON-LD parser. + */ +public final class JsonLdConsts { + + public static final String RDF_SYNTAX_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; + public static final String RDF_SCHEMA_NS = "http://www.w3.org/2000/01/rdf-schema#"; + public static final String XSD_NS = "http://www.w3.org/2001/XMLSchema#"; + + public static final String XSD_ANYTYPE = XSD_NS + "anyType"; + public static final String XSD_BOOLEAN = XSD_NS + "boolean"; + public static final String XSD_DOUBLE = XSD_NS + "double"; + public static final String XSD_INTEGER = XSD_NS + "integer"; + public static final String XSD_FLOAT = XSD_NS + "float"; + public static final String XSD_DECIMAL = XSD_NS + "decimal"; + public static final String XSD_ANYURI = XSD_NS + "anyURI"; + public static final String XSD_STRING = XSD_NS + "string"; + + public static final String RDF_TYPE = RDF_SYNTAX_NS + "type"; + public static final String RDF_FIRST = RDF_SYNTAX_NS + "first"; + public static final String RDF_REST = RDF_SYNTAX_NS + "rest"; + public static final String RDF_NIL = RDF_SYNTAX_NS + "nil"; + public static final String RDF_PLAIN_LITERAL = RDF_SYNTAX_NS + "PlainLiteral"; + public static final String RDF_XML_LITERAL = RDF_SYNTAX_NS + "XMLLiteral"; + public static final String RDF_OBJECT = RDF_SYNTAX_NS + "object"; + public static final String RDF_LANGSTRING = RDF_SYNTAX_NS + "langString"; + public static final String RDF_LIST = RDF_SYNTAX_NS + "List"; + + public static final String TEXT_TURTLE = "text/turtle"; + public static final String APPLICATION_NQUADS = "application/n-quads"; // https://www.w3.org/TR/n-quads/#sec-mediatype + + public static final String FLATTENED = "flattened"; + public static final String COMPACTED = "compacted"; + public static final String EXPANDED = "expanded"; + + public static final String ID = "@id"; + public static final String DEFAULT = "@default"; + public static final String GRAPH = "@graph"; + public static final String CONTEXT = "@context"; + public static final String PRESERVE = "@preserve"; + public static final String EXPLICIT = "@explicit"; + public static final String OMIT_DEFAULT = "@omitDefault"; + public static final String EMBED_CHILDREN = "@embedChildren"; + public static final String EMBED = "@embed"; + public static final String LIST = "@list"; + public static final String LANGUAGE = "@language"; + public static final String INDEX = "@index"; + public static final String SET = "@set"; + public static final String TYPE = "@type"; + public static final String REVERSE = "@reverse"; + public static final String VALUE = "@value"; + public static final String NULL = "@null"; + public static final String NONE = "@none"; + public static final String CONTAINER = "@container"; + public static final String BLANK_NODE_PREFIX = "_:"; + public static final String VOCAB = "@vocab"; + public static final String BASE = "@base"; + public static final String REQUIRE_ALL = "@requireAll"; + + public enum Embed { + ALWAYS, NEVER, LAST, LINK; + } +} \ No newline at end of file diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdError.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdError.java new file mode 100644 index 0000000..21ab577 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdError.java @@ -0,0 +1,142 @@ +package com.github.jsonldjava.core; + +public class JsonLdError extends RuntimeException { + + private static final long serialVersionUID = -8685402790466459014L; + private final Error type; + + public JsonLdError(Error type, Object 
detail) { + // TODO: pretty toString (e.g. print whole json objects) + super(detail == null ? "" : detail.toString()); + this.type = type; + } + + public JsonLdError(Error type) { + super(""); + this.type = type; + } + + public JsonLdError(Error type, Object detail, Throwable cause) { + // TODO: pretty toString (e.g. print whole json objects) + super(detail == null ? "" : detail.toString(), cause); + this.type = type; + } + + public JsonLdError(Error type, Throwable cause) { + super(cause); + this.type = type; + } + + public enum Error { + LOADING_DOCUMENT_FAILED("loading document failed"), + + LIST_OF_LISTS("list of lists"), + + INVALID_INDEX_VALUE("invalid @index value"), + + CONFLICTING_INDEXES("conflicting indexes"), + + INVALID_ID_VALUE("invalid @id value"), + + INVALID_LOCAL_CONTEXT("invalid local context"), + + MULTIPLE_CONTEXT_LINK_HEADERS("multiple context link headers"), + + LOADING_REMOTE_CONTEXT_FAILED("loading remote context failed"), + + LOADING_INJECTED_CONTEXT_FAILED("loading injected context failed"), + + INVALID_REMOTE_CONTEXT("invalid remote context"), + + RECURSIVE_CONTEXT_INCLUSION("recursive context inclusion"), + + INVALID_BASE_IRI("invalid base IRI"), + + INVALID_VOCAB_MAPPING("invalid vocab mapping"), + + INVALID_DEFAULT_LANGUAGE("invalid default language"), + + KEYWORD_REDEFINITION("keyword redefinition"), + + INVALID_TERM_DEFINITION("invalid term definition"), + + INVALID_REVERSE_PROPERTY("invalid reverse property"), + + INVALID_IRI_MAPPING("invalid IRI mapping"), + + CYCLIC_IRI_MAPPING("cyclic IRI mapping"), + + INVALID_KEYWORD_ALIAS("invalid keyword alias"), + + INVALID_TYPE_MAPPING("invalid type mapping"), + + INVALID_LANGUAGE_MAPPING("invalid language mapping"), + + COLLIDING_KEYWORDS("colliding keywords"), + + INVALID_CONTAINER_MAPPING("invalid container mapping"), + + INVALID_TYPE_VALUE("invalid type value"), + + INVALID_VALUE_OBJECT("invalid value object"), + + INVALID_VALUE_OBJECT_VALUE("invalid value object value"), + + INVALID_LANGUAGE_TAGGED_STRING("invalid language-tagged string"), + + INVALID_LANGUAGE_TAGGED_VALUE("invalid language-tagged value"), + + INVALID_TYPED_VALUE("invalid typed value"), + + INVALID_SET_OR_LIST_OBJECT("invalid set or list object"), + + INVALID_LANGUAGE_MAP_VALUE("invalid language map value"), + + COMPACTION_TO_LIST_OF_LISTS("compaction to list of lists"), + + INVALID_REVERSE_PROPERTY_MAP("invalid reverse property map"), + + INVALID_REVERSE_VALUE("invalid @reverse value"), + + INVALID_REVERSE_PROPERTY_VALUE("invalid reverse property value"), + + INVALID_EMBED_VALUE("invalid @embed value"), + + // non spec related errors + SYNTAX_ERROR("syntax error"), + + NOT_IMPLEMENTED("not implemnted"), + + UNKNOWN_FORMAT("unknown format"), + + INVALID_INPUT("invalid input"), + + PARSE_ERROR("parse error"), + + UNKNOWN_ERROR("unknown error"); + + private final String error; + + private Error(String error) { + this.error = error; + } + + @Override + public String toString() { + return error; + } + } + + public Error getType() { + return type; + } + + @Override + public String getMessage() { + final String msg = super.getMessage(); + if (msg != null && !"".equals(msg)) { + return type.toString() + ": " + msg; + } + return type.toString(); + } +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdOptions.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdOptions.java new file mode 100644 index 0000000..8290048 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdOptions.java @@ -0,0 
+1,222 @@ +package com.github.jsonldjava.core; + +/** + * The JsonLdOptions type as specified in the + * JSON-LD- + * API specification. + * + * @author tristan + * + */ +public class JsonLdOptions { + + public static final String JSON_LD_1_0 = "json-ld-1.0"; + + public static final String JSON_LD_1_1 = "json-ld-1.1"; + + public static final boolean DEFAULT_COMPACT_ARRAYS = true; + + public static final String URGNA2012 = "URGNA2012"; + public static final String URDNA2015 = "URDNA2015"; + + /** + * Constructs an instance of JsonLdOptions using an empty base. + */ + public JsonLdOptions() { + this(""); + } + + /** + * Constructs an instance of JsonLdOptions using the given base. + * + * @param base + * The base IRI for the document. + */ + public JsonLdOptions(String base) { + this.setBase(base); + } + + /** + * Creates a shallow copy of this JsonLdOptions object. + * + * It will share the same DocumentLoader unless that is overridden, and + * other mutable objects, so it isn't immutable. + * + * @return A copy of this JsonLdOptions object. + */ + /*public JsonLdOptions copy() { + final JsonLdOptions copy = new JsonLdOptions(base); + + copy.setCompactArrays(compactArrays); + copy.setExpandContext(expandContext); + copy.setProcessingMode(processingMode); + copy.setDocumentLoader(documentLoader); + copy.setEmbed(embed); + copy.setExplicit(explicit); + copy.setOmitDefault(omitDefault); + copy.setOmitGraph(omitGraph); + copy.setFrameExpansion(frameExpansion); + copy.setPruneBlankNodeIdentifiers(pruneBlankNodeIdentifiers); + copy.setRequireAll(requireAll); + copy.setAllowContainerSetOnType(allowContainerSetOnType); + copy.setUseRdfType(useRdfType); + copy.setUseNativeTypes(useNativeTypes); + copy.setProduceGeneralizedRdf(produceGeneralizedRdf); + copy.format = format; + copy.useNamespaces = useNamespaces; + copy.outputForm = outputForm; + + return copy; + }*/ + + // Base options : http://www.w3.org/TR/json-ld-api/#idl-def-JsonLdOptions + + /** + * http://www.w3.org/TR/json-ld-api/#widl-JsonLdOptions-base + */ + private String base = null; + + /** + * http://www.w3.org/TR/json-ld-api/#widl-JsonLdOptions-compactArrays + */ + private Boolean compactArrays = DEFAULT_COMPACT_ARRAYS; + /** + * http://www.w3.org/TR/json-ld-api/#widl-JsonLdOptions-expandContext + */ + private Object expandContext = null; + /** + * http://www.w3.org/TR/json-ld-api/#widl-JsonLdOptions-processingMode + */ + private String processingMode = JSON_LD_1_0; + + /** + * Normalitazion algorithm + */ + private String algorithm = URGNA2012; + + /** + * http://www.w3.org/TR/json-ld-api/#widl-JsonLdOptions-documentLoader + */ + private DocumentLoader documentLoader = new DocumentLoader(); + + // Frame options : http://json-ld.org/spec/latest/json-ld-framing/ + + private Boolean embed = null; + private Boolean explicit = null; + private Boolean omitDefault = null; + //private Boolean omitGraph = false; + //private Boolean frameExpansion = false; + //private Boolean pruneBlankNodeIdentifiers = false; + //private Boolean requireAll = false; + //private Boolean allowContainerSetOnType = false; + + // RDF conversion options : + // http://www.w3.org/TR/json-ld-api/#serialize-rdf-as-json-ld-algorithm + + Boolean useRdfType = false; + Boolean useNativeTypes = false; + private boolean produceGeneralizedRdf = false; + + public Boolean getEmbed() { + return embed; + } + + public void setEmbed(Boolean embed) { + this.embed = embed; + } + + public Boolean getExplicit() { + return explicit; + } + + public void setExplicit(Boolean explicit) { + 
this.explicit = explicit; + } + + public Boolean getOmitDefault() { + return omitDefault; + } + + public void setOmitDefault(Boolean omitDefault) { + this.omitDefault = omitDefault; + } + + public Boolean getCompactArrays() { + return compactArrays; + } + + public void setCompactArrays(Boolean compactArrays) { + this.compactArrays = compactArrays; + } + + public Object getExpandContext() { + return expandContext; + } + + public void setExpandContext(Object expandContext) { + this.expandContext = expandContext; + } + + public String getProcessingMode() { + return processingMode; + } + + public void setProcessingMode(String processingMode) { + this.processingMode = processingMode; + } + + public void setAlgorithm(String algorithm) { + this.algorithm = algorithm; + } + + public String getAlgorithm() { + return this.algorithm; + } + + public String getBase() { + return base; + } + + public void setBase(String base) { + this.base = base; + } + + public Boolean getUseRdfType() { + return useRdfType; + } + + public void setUseRdfType(Boolean useRdfType) { + this.useRdfType = useRdfType; + } + + public Boolean getUseNativeTypes() { + return useNativeTypes; + } + + public void setUseNativeTypes(Boolean useNativeTypes) { + this.useNativeTypes = useNativeTypes; + } + + public boolean getProduceGeneralizedRdf() { + return this.produceGeneralizedRdf; + } + + public void setProduceGeneralizedRdf(Boolean produceGeneralizedRdf) { + this.produceGeneralizedRdf = produceGeneralizedRdf; + } + + public DocumentLoader getDocumentLoader() { + return documentLoader; + } + + public void setDocumentLoader(DocumentLoader documentLoader) { + this.documentLoader = documentLoader; + } + + // TODO: THE FOLLOWING ONLY EXIST SO I DON'T HAVE TO DELETE A LOT OF CODE, + // REMOVE IT WHEN DONE + public String format = null; + public Boolean useNamespaces = false; + public String outputForm = null; + +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdProcessor.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdProcessor.java new file mode 100644 index 0000000..4548664 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdProcessor.java @@ -0,0 +1,570 @@ +package com.github.jsonldjava.core; + +import com.github.jsonldjava.core.JsonLdError.Error; +import com.github.jsonldjava.impl.NQuadRDFParser; +import com.github.jsonldjava.impl.NQuadTripleCallback; + +import java.util.*; + +import static com.github.jsonldjava.utils.Obj.newMap; + + +/** + * Created by noah on 09/04/17. + */ + +/** + * This class implements the + * JsonLdProcessor interface, except that it does not currently support + * asynchronous processing, and hence does not return Promises, instead directly + * returning the results. + * + * @author tristan + */ +public abstract class JsonLdProcessor { + + public abstract void onNormalized(Object object); + + + /** + * Compacts the given input using the context according to the steps in the + * + * Compaction algorithm. + * + * @param input The input JSON-LD object. + * @param context The context object to use for the compaction algorithm. + * @param opts The {@link JsonLdOptions} that are to be sent to the + * compaction algorithm. + * @return The compacted JSON-LD document + * @throws JsonLdError If there is an error while compacting. 
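As a usage sketch for the compact() entry point documented above, assuming the vendored classes behave like upstream jsonld-java and using made-up example.org / schema.org IRIs:

    import com.github.jsonldjava.core.JsonLdOptions;
    import com.github.jsonldjava.core.JsonLdProcessor;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class CompactExample {
        public static void main(String[] args) {
            // expanded-style input: one node with a full-IRI property
            Map<String, Object> input = new LinkedHashMap<>();
            input.put("@id", "http://example.org/book/1");
            input.put("http://schema.org/name", "JSON-LD primer");

            // context that maps the IRI to a short term
            Map<String, Object> context = new LinkedHashMap<>();
            context.put("name", "http://schema.org/name");

            Map compacted = JsonLdProcessor.compact(input, context, new JsonLdOptions());
            // the result uses the short "name" term and carries the supplied @context
            System.out.println(compacted);
        }
    }

Note that JsonLdError extends RuntimeException in this copy, so callers do not need a throws clause.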
+ */ + public static Map compact(Object input, Object context, JsonLdOptions opts) + throws JsonLdError { + // 1) + // TODO: look into java futures/promises + + // 2-6) NOTE: these are all the same steps as in expand + final Object expanded = expand(input, opts); + // 7) + if (context instanceof Map + && ((Map) context).containsKey(JsonLdConsts.CONTEXT)) { + context = ((Map) context).get(JsonLdConsts.CONTEXT); + } + Context activeCtx = new Context(opts); + activeCtx = activeCtx.parse(context); + // 8) + Object compacted = new JsonLdApi(opts).compact(activeCtx, null, expanded, + opts.getCompactArrays()); + + // final step of Compaction Algorithm + // TODO: SPEC: the result result is a NON EMPTY array, + if (compacted instanceof List) { + if (((List) compacted).isEmpty()) { + compacted = newMap(); + } else { + final Map tmp = newMap(); + // TODO: SPEC: doesn't specify to use vocab = true here + tmp.put(activeCtx.compactIri(JsonLdConsts.GRAPH, true), compacted); + compacted = tmp; + } + } + if (compacted != null && context != null) { + // TODO: figure out if we can make "@context" appear at the start of + // the keySet + if ((context instanceof Map && !((Map) context).isEmpty()) + || (context instanceof List && !((List) context).isEmpty())) { + + if (context instanceof List && ((List) context).size() == 1 + && opts.getCompactArrays()) { + ((Map) compacted).put(JsonLdConsts.CONTEXT, + ((List) context).get(0)); + } else { + ((Map) compacted).put(JsonLdConsts.CONTEXT, context); + } + } + } + + // 9) + return (Map) compacted; + } + + /** + * Expands the given input according to the steps in the + * Expansion + * algorithm. + * + * @param input The input JSON-LD object. + * @param opts The {@link JsonLdOptions} that are to be sent to the expansion + * algorithm. + * @return The expanded JSON-LD document + * @throws JsonLdError If there is an error while expanding. 
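A matching sketch for expand(), again assuming the vendored code follows the upstream behaviour (hypothetical IRIs):

    import com.github.jsonldjava.core.JsonLdOptions;
    import com.github.jsonldjava.core.JsonLdProcessor;
    import java.util.*;

    public class ExpandExample {
        public static void main(String[] args) {
            Map<String, Object> context = new LinkedHashMap<>();
            context.put("name", "http://schema.org/name");

            Map<String, Object> doc = new LinkedHashMap<>();
            doc.put("@context", context);
            doc.put("@id", "http://example.org/book/1");
            doc.put("name", "JSON-LD primer");

            // expansion resolves terms against @context and drops it from the output
            List expanded = JsonLdProcessor.expand(doc, new JsonLdOptions());
            // expanded is a list of node maps keyed by full IRIs, with string
            // values wrapped as {"@value": ...} objects
            System.out.println(expanded);
        }
    }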
+ */ + public static List expand(Object input, JsonLdOptions opts) throws JsonLdError { + // 1) + // TODO: look into java futures/promises + + // 2) TODO: better verification of DOMString IRI + if (input instanceof String && ((String) input).contains(":")) { + try { + final RemoteDocument tmp = opts.getDocumentLoader().loadDocument((String) input); + input = tmp.document; + // TODO: figure out how to deal with remote context + } catch (final Exception e) { + throw new JsonLdError(Error.LOADING_DOCUMENT_FAILED, e); + } + // if set the base in options should override the base iri in the + // active context + // thus only set this as the base iri if it's not already set in + // options + if (opts.getBase() == null) { + opts.setBase((String) input); + } + } + + // 3) + Context activeCtx = new Context(opts); + // 4) + if (opts.getExpandContext() != null) { + Object exCtx = opts.getExpandContext(); + if (exCtx instanceof Map + && ((Map) exCtx).containsKey(JsonLdConsts.CONTEXT)) { + exCtx = ((Map) exCtx).get(JsonLdConsts.CONTEXT); + } + activeCtx = activeCtx.parse(exCtx); + } + + // 5) + // TODO: add support for getting a context from HTTP when content-type + // is set to a jsonld compatable format + + // 6) + Object expanded = new JsonLdApi(opts).expand(activeCtx, input); + + // final step of Expansion Algorithm + if (expanded instanceof Map && ((Map) expanded).containsKey(JsonLdConsts.GRAPH) + && ((Map) expanded).size() == 1) { + expanded = ((Map) expanded).get(JsonLdConsts.GRAPH); + } else if (expanded == null) { + expanded = new ArrayList(); + } + + // normalize to an array + if (!(expanded instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(expanded); + expanded = tmp; + } + return (List) expanded; + } + + /** + * Expands the given input according to the steps in the + * Expansion + * algorithm, using the default {@link JsonLdOptions}. + * + * @param input The input JSON-LD object. + * @return The expanded JSON-LD document + * @throws JsonLdError If there is an error while expanding. 
+ */ + public static List expand(Object input) throws JsonLdError { + return expand(input, new JsonLdOptions("")); + } + + public static Object flatten(Object input, Object context, JsonLdOptions opts) + throws JsonLdError { + // 2-6) NOTE: these are all the same steps as in expand + final Object expanded = expand(input, opts); + // 7) + if (context instanceof Map + && ((Map) context).containsKey(JsonLdConsts.CONTEXT)) { + context = ((Map) context).get(JsonLdConsts.CONTEXT); + } + // 8) NOTE: blank node generation variables are members of JsonLdApi + // 9) NOTE: the next block is the Flattening Algorithm described in + // http://json-ld.org/spec/latest/json-ld-api/#flattening-algorithm + + // 1) + final Map nodeMap = newMap(); + nodeMap.put(JsonLdConsts.DEFAULT, newMap()); + // 2) + new JsonLdApi(opts).generateNodeMap(expanded, nodeMap); + // 3) + final Map defaultGraph = (Map) nodeMap + .remove(JsonLdConsts.DEFAULT); + // 4) + for (final String graphName : nodeMap.keySet()) { + final Map graph = (Map) nodeMap.get(graphName); + // 4.1+4.2) + Map entry; + if (!defaultGraph.containsKey(graphName)) { + entry = newMap(); + entry.put(JsonLdConsts.ID, graphName); + defaultGraph.put(graphName, entry); + } else { + entry = (Map) defaultGraph.get(graphName); + } + // 4.3) + // TODO: SPEC doesn't specify that this should only be added if it + // doesn't exists + if (!entry.containsKey(JsonLdConsts.GRAPH)) { + entry.put(JsonLdConsts.GRAPH, new ArrayList()); + } + final List keys = new ArrayList(graph.keySet()); + Collections.sort(keys); + for (final String id : keys) { + final Map node = (Map) graph.get(id); + if (!(node.containsKey(JsonLdConsts.ID) && node.size() == 1)) { + ((List) entry.get(JsonLdConsts.GRAPH)).add(node); + } + } + + } + // 5) + final List flattened = new ArrayList(); + // 6) + final List keys = new ArrayList(defaultGraph.keySet()); + Collections.sort(keys); + for (final String id : keys) { + final Map node = (Map) defaultGraph.get(id); + if (!(node.containsKey(JsonLdConsts.ID) && node.size() == 1)) { + flattened.add(node); + } + } + // 8) + if (context != null && !flattened.isEmpty()) { + Context activeCtx = new Context(opts); + activeCtx = activeCtx.parse(context); + // TODO: only instantiate one jsonldapi + Object compacted = new JsonLdApi(opts).compact(activeCtx, null, flattened, + opts.getCompactArrays()); + if (!(compacted instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(compacted); + compacted = tmp; + } + final String alias = activeCtx.compactIri(JsonLdConsts.GRAPH); + final Map rval = activeCtx.serialize(); + rval.put(alias, compacted); + return rval; + } + return flattened; + } + + /** + * Flattens the given input and compacts it using the passed context + * according to the steps in the + * + * Flattening algorithm: + * + * @param input The input JSON-LD object. + * @param opts The {@link JsonLdOptions} that are to be sent to the + * flattening algorithm. + * @return The flattened JSON-LD document + * @throws JsonLdError If there is an error while flattening. + */ + public static Object flatten(Object input, JsonLdOptions opts) throws JsonLdError { + return flatten(input, null, opts); + } + + /** + * Frames the given input using the frame according to the steps in the + * + * Framing Algorithm. + * + * @param input The input JSON-LD object. + * @param frame The frame to use when re-arranging the data of input; either + * in the form of an JSON object or as IRI. + * @param opts The {@link JsonLdOptions} that are to be sent to the framing + * algorithm. 
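For the flatten() overloads defined above, a small sketch (hypothetical IRIs; behaviour assumed to match upstream jsonld-java). Passing a null context returns the flattened node list without a compaction pass:

    import com.github.jsonldjava.core.JsonLdOptions;
    import com.github.jsonldjava.core.JsonLdProcessor;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class FlattenExample {
        public static void main(String[] args) {
            Map<String, Object> author = new LinkedHashMap<>();
            author.put("@id", "http://example.org/people/alice");
            author.put("http://schema.org/name", "Alice");

            Map<String, Object> book = new LinkedHashMap<>();
            book.put("@id", "http://example.org/book/1");
            book.put("http://schema.org/author", author);

            Object flattened = JsonLdProcessor.flatten(book, null, new JsonLdOptions());
            // both nodes become top-level entries; the embedded author is replaced
            // inside the book node by an {"@id": ...} reference
            System.out.println(flattened);
        }
    }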
+ * @return The framed JSON-LD document + * @throws JsonLdError If there is an error while framing. + */ + public static Map frame(Object input, Object frame, JsonLdOptions opts) + throws JsonLdError { + + if (frame instanceof Map) { + frame = JsonLdUtils.clone(frame); + } + // TODO string/IO input + + final Object expandedInput = expand(input, opts); + final List expandedFrame = expand(frame, opts); + + final JsonLdApi api = new JsonLdApi(expandedInput, opts); + final List framed = api.frame(expandedInput, expandedFrame); + final Context activeCtx = api.context + .parse(((Map) frame).get(JsonLdConsts.CONTEXT)); + + Object compacted = api.compact(activeCtx, null, framed, opts.getCompactArrays()); + if (!(compacted instanceof List)) { + final List tmp = new ArrayList(); + tmp.add(compacted); + compacted = tmp; + } + final String alias = activeCtx.compactIri(JsonLdConsts.GRAPH); + final Map rval = activeCtx.serialize(); + rval.put(alias, compacted); + JsonLdUtils.removePreserve(activeCtx, rval, opts); + return rval; + } + + /** + * A registry for RDF Parsers (in this case, JSONLDSerializers) used by + * fromRDF if no specific serializer is specified and options.format is set. + *
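And a sketch for the frame() entry point above, selecting nodes by @type and compacting the result with the frame's own @context (hypothetical IRIs; behaviour assumed to be upstream-compatible):

    import com.github.jsonldjava.core.JsonLdOptions;
    import com.github.jsonldjava.core.JsonLdProcessor;
    import java.util.*;

    public class FrameExample {
        public static void main(String[] args) {
            Map<String, Object> book = new LinkedHashMap<>();
            book.put("@id", "http://example.org/book/1");
            book.put("@type", "http://example.org/Book");
            book.put("http://schema.org/name", "JSON-LD primer");

            Map<String, Object> frame = new LinkedHashMap<>();
            frame.put("@context", Collections.singletonMap("name", "http://schema.org/name"));
            frame.put("@type", "http://example.org/Book");

            Map framed = JsonLdProcessor.frame(book, frame, new JsonLdOptions());
            // framed carries the supplied @context plus a @graph array holding
            // the matching node(s), compacted with the "name" term
            System.out.println(framed);
        }
    }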

+ * TODO: this would fit better in the document loader class + */ + private static Map rdfParsers = new LinkedHashMap() { + { + // automatically register nquad serializer + put(JsonLdConsts.APPLICATION_NQUADS, new NQuadRDFParser()); + } + }; + + public static void registerRDFParser(String format, RDFParser parser) { + rdfParsers.put(format, parser); + } + + public static void removeRDFParser(String format) { + rdfParsers.remove(format); + } + + /** + * Converts an RDF dataset to JSON-LD. + * + * @param dataset a serialized string of RDF in a format specified by the format + * option or an RDF dataset to convert. + * @param options the options to use: [format] the format if input is not an + * array: 'application/nquads' for N-Quads (default). + * [useRdfType] true to use rdf:type, false to use @type + * (default: false). [useNativeTypes] true to convert XSD types + * into native types (boolean, integer, double), false not to + * (default: true). + * @return A JSON-LD object. + * @throws JsonLdError If there is an error converting the dataset to JSON-LD. + */ + public static Object fromRDF(Object dataset, JsonLdOptions options) throws JsonLdError { + // handle non specified serializer case + + RDFParser parser = null; + + if (options.format == null && dataset instanceof String) { + // attempt to parse the input as nquads + options.format = JsonLdConsts.APPLICATION_NQUADS; + } + + if (rdfParsers.containsKey(options.format)) { + parser = rdfParsers.get(options.format); + } else { + throw new JsonLdError(JsonLdError.Error.UNKNOWN_FORMAT, options.format); + } + + // convert from RDF + return fromRDF(dataset, options, parser); + } + + /** + * Converts an RDF dataset to JSON-LD, using the default + * {@link JsonLdOptions}. + * + * @param dataset a serialized string of RDF in a format specified by the format + * option or an RDF dataset to convert. + * @return The JSON-LD object represented by the given RDF dataset + * @throws JsonLdError If there was an error converting from RDF to JSON-LD + */ + public static Object fromRDF(Object dataset) throws JsonLdError { + return fromRDF(dataset, new JsonLdOptions("")); + } + + /** + * Converts an RDF dataset to JSON-LD, using a specific instance of + * {@link RDFParser}. + * + * @param input a serialized string of RDF in a format specified by the format + * option or an RDF dataset to convert. + * @param options the options to use: [format] the format if input is not an + * array: 'application/nquads' for N-Quads (default). + * [useRdfType] true to use rdf:type, false to use @type + * (default: false). [useNativeTypes] true to convert XSD types + * into native types (boolean, integer, double), false not to + * (default: true). + * @param parser A specific instance of {@link RDFParser} to use for the + * conversion. + * @return A JSON-LD object. + * @throws JsonLdError If there is an error converting the dataset to JSON-LD. 
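A usage sketch for fromRDF(): when the input is a String and no format is set, it is parsed with the registered application/n-quads parser (the NQuadRDFParser imported above). The triple below is a made-up example:

    import com.github.jsonldjava.core.JsonLdOptions;
    import com.github.jsonldjava.core.JsonLdProcessor;

    public class FromRdfExample {
        public static void main(String[] args) {
            String nquads =
                "<http://example.org/book/1> <http://schema.org/name> \"JSON-LD primer\" .\n";

            Object json = JsonLdProcessor.fromRDF(nquads, new JsonLdOptions());
            // json is a list containing one node map for http://example.org/book/1
            System.out.println(json);
        }
    }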
+ */ + public static Object fromRDF(Object input, JsonLdOptions options, RDFParser parser) + throws JsonLdError { + + final RDFDataset dataset = parser.parse(input); + + // convert from RDF + final Object rval = new JsonLdApi(options).fromRDF(dataset); + + // re-process using the generated context if outputForm is set + if (options.outputForm != null) { + if (JsonLdConsts.EXPANDED.equals(options.outputForm)) { + return rval; + } else if (JsonLdConsts.COMPACTED.equals(options.outputForm)) { + return compact(rval, dataset.getContext(), options); + } else if (JsonLdConsts.FLATTENED.equals(options.outputForm)) { + return flatten(rval, dataset.getContext(), options); + } else { + throw new JsonLdError(JsonLdError.Error.UNKNOWN_ERROR, + "Output form was unknown: " + options.outputForm); + } + } + return rval; + } + + /** + * Converts an RDF dataset to JSON-LD, using a specific instance of + * {@link RDFParser}, and the default {@link JsonLdOptions}. + * + * @param input a serialized string of RDF in a format specified by the format + * option or an RDF dataset to convert. + * @param parser A specific instance of {@link RDFParser} to use for the + * conversion. + * @return A JSON-LD object. + * @throws JsonLdError If there is an error converting the dataset to JSON-LD. + */ + public static Object fromRDF(Object input, RDFParser parser) throws JsonLdError { + return fromRDF(input, new JsonLdOptions(""), parser); + } + + /** + * Outputs the RDF dataset found in the given JSON-LD object. + * + * @param input the JSON-LD input. + * @param callback A callback that is called when the input has been converted to + * Quads (null to use options.format instead). + * @param options the options to use: [base] the base IRI to use. [format] the + * format to use to output a string: 'application/nquads' for + * N-Quads (default). [loadContext(url, callback(err, url, + * result))] the context loader. + * @return The result of executing + * {@link JsonLdTripleCallback#call(RDFDataset)} on the results, or + * if {@link JsonLdOptions#format} is not null, a result in that + * format if it is found, or otherwise the raw {@link RDFDataset}. + * @throws JsonLdError If there is an error converting the dataset to JSON-LD. + */ + public static Object toRDF(Object input, JsonLdTripleCallback callback, JsonLdOptions options) + throws JsonLdError { + + final Object expandedInput = expand(input, options); + + final JsonLdApi api = new JsonLdApi(expandedInput, options); + final RDFDataset dataset = api.toRDF(); + + // generate namespaces from context + if (options.useNamespaces) { + List> _input; + if (input instanceof List) { + _input = (List>) input; + } else { + _input = new ArrayList>(); + _input.add((Map) input); + } + for (final Map e : _input) { + if (e.containsKey(JsonLdConsts.CONTEXT)) { + dataset.parseContext(e.get(JsonLdConsts.CONTEXT)); + } + } + } + + if (callback != null) { + return callback.call(dataset); + } + + if (options.format != null) { + if (JsonLdConsts.APPLICATION_NQUADS.equals(options.format)) { + return new NQuadTripleCallback().call(dataset); + } else { + throw new JsonLdError(JsonLdError.Error.UNKNOWN_FORMAT, options.format); + } + } + return dataset; + } + + /** + * Outputs the RDF dataset found in the given JSON-LD object. + * + * @param input the JSON-LD input. + * @param options the options to use: [base] the base IRI to use. [format] the + * format to use to output a string: 'application/nquads' for + * N-Quads (default). [loadContext(url, callback(err, url, + * result))] the context loader. 
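A sketch for toRDF(), showing the two return shapes handled above: with neither a callback nor a format the raw RDFDataset comes back, while setting the format to application/n-quads yields a serialized String (hypothetical input; behaviour assumed to match upstream):

    import com.github.jsonldjava.core.JsonLdConsts;
    import com.github.jsonldjava.core.JsonLdOptions;
    import com.github.jsonldjava.core.JsonLdProcessor;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ToRdfExample {
        public static void main(String[] args) {
            Map<String, Object> doc = new LinkedHashMap<>();
            doc.put("@id", "http://example.org/book/1");
            doc.put("http://schema.org/name", "JSON-LD primer");

            // no callback, no format: the raw RDFDataset is returned
            Object dataset = JsonLdProcessor.toRDF(doc, new JsonLdOptions());

            // asking for N-Quads returns a serialized string instead
            JsonLdOptions opts = new JsonLdOptions();
            opts.format = JsonLdConsts.APPLICATION_NQUADS;
            System.out.println(JsonLdProcessor.toRDF(doc, opts));
        }
    }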
+ * @return A JSON-LD object. + * @throws JsonLdError If there is an error converting the dataset to JSON-LD. + */ + public static Object toRDF(Object input, JsonLdOptions options) throws JsonLdError { + return toRDF(input, null, options); + } + + /** + * Outputs the RDF dataset found in the given JSON-LD object, using the + * default {@link JsonLdOptions}. + * + * @param input the JSON-LD input. + * @param callback A callback that is called when the input has been converted to + * Quads (null to use options.format instead). + * @return A JSON-LD object. + * @throws JsonLdError If there is an error converting the dataset to JSON-LD. + */ + public static Object toRDF(Object input, JsonLdTripleCallback callback) throws JsonLdError { + return toRDF(input, callback, new JsonLdOptions("")); + } + + /** + * Outputs the RDF dataset found in the given JSON-LD object, using the + * default {@link JsonLdOptions}. + * + * @param input the JSON-LD input. + * @return A JSON-LD object. + * @throws JsonLdError If there is an error converting the dataset to JSON-LD. + */ + public static Object toRDF(Object input) throws JsonLdError { + return toRDF(input, new JsonLdOptions("")); + } + + /** + * Performs RDF dataset normalization on the given JSON-LD input. The output + * is an RDF dataset unless the 'format' option is used. + * + * @param input the JSON-LD input to normalize. + * @param options the options to use: [base] the base IRI to use. [format] the + * format if output is a string: 'application/nquads' for + * N-Quads. [loadContext(url, callback(err, url, result))] the + * context loader. + * @return The JSON-LD object + * @throws JsonLdError If there is an error normalizing the dataset. + */ + + + public static Object normalize(Object input, JsonLdOptions options) throws JsonLdError { + final JsonLdOptions opts = new JsonLdOptions(options.getBase()); + opts.format = null; + final RDFDataset dataset = (RDFDataset) toRDF(input, opts); + + return new JsonLdApi(options).normalize(dataset, options); + } + + + /** + * Performs RDF dataset normalization on the given JSON-LD input. The output + * is an RDF dataset unless the 'format' option is used. Uses the default + * {@link JsonLdOptions}. + * + * @param input the JSON-LD input to normalize. + * @return The JSON-LD object + * @throws JsonLdError If there is an error normalizing the dataset. + */ + + + public static Object normalize(Object input) throws JsonLdError { + return normalize(input, new JsonLdOptions("")); + } + +} \ No newline at end of file diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdTripleCallback.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdTripleCallback.java new file mode 100644 index 0000000..f626bf2 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdTripleCallback.java @@ -0,0 +1,44 @@ +package com.github.jsonldjava.core; + +/** + * + * @author Tristan + * + * TODO: in the JSONLD RDF API the callback we're representing here is + * QuadCallback which takes a list of quads (subject, predicat, object, + * graph). for the moment i'm just going to use the dataset provided by + * toRDF but this should probably change in the future + */ +public interface JsonLdTripleCallback { + + /** + * Construct output based on internal RDF dataset format + * + * @param dataset + * The format of the dataset is a Map with the following + * structure: { GRAPH_1: [ TRIPLE_1, TRIPLE_2, ..., TRIPLE_N ], + * GRAPH_2: [ TRIPLE_1, TRIPLE_2, ..., TRIPLE_N ], ... 
GRAPH_N: [ + * TRIPLE_1, TRIPLE_2, ..., TRIPLE_N ] } + * + * GRAPH: Is the graph name/IRI. if no graph is present for a + * triple, it will be listed under the "@default" graph TRIPLE: + * Is a map with the following structure: { "subject" : SUBJECT + * "predicate" : PREDICATE "object" : OBJECT } + * + * Each of the values in the triple map are also maps with the + * following key-value pairs: "value" : The value of the node. + * "subject" can be an IRI or blank node id. "predicate" should + * only ever be an IRI "object" can be and IRI or blank node id, + * or a literal value (represented as a string) "type" : "IRI" if + * the value is an IRI or "blank node" if the value is a blank + * node. "object" can also be "literal" in the case of literals. + * The value of "object" can also contain the following optional + * key-value pairs: "language" : the language value of a string + * literal "datatype" : the datatype of the literal. (if not set + * will default to XSD:string, if set to null, null will be + * used). + * + * @return the resulting RDF object in the desired format + */ + public Object call(RDFDataset dataset); +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdUtils.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdUtils.java new file mode 100644 index 0000000..664e34d --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/JsonLdUtils.java @@ -0,0 +1,487 @@ +package com.github.jsonldjava.core; + +import com.github.jsonldjava.utils.Obj; + +import java.util.*; + +public class JsonLdUtils { + + private static final int MAX_CONTEXT_URLS = 10; + + /** + * Returns whether or not the given value is a keyword (or a keyword alias). + * + * @param key + * the value to check. + * @return true if the value is a keyword, false if not. 
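Since JsonLdTripleCallback (defined above) is a single-method interface over the internal RDFDataset, a custom implementation can be passed straight to toRDF(). Below is a minimal sketch that merely counts quads per graph, relying only on the graphNames()/getQuads() accessors used elsewhere in this patch; the class name is made up:

    import com.github.jsonldjava.core.JsonLdTripleCallback;
    import com.github.jsonldjava.core.RDFDataset;

    public class QuadCountingCallback implements JsonLdTripleCallback {
        @Override
        public Object call(RDFDataset dataset) {
            int count = 0;
            // every graph name, including "@default", maps to a list of triples
            for (final String graphName : dataset.graphNames()) {
                count += dataset.getQuads(graphName).size();
            }
            return count;
        }
    }

It would be invoked as JsonLdProcessor.toRDF(input, new QuadCountingCallback(), new JsonLdOptions()).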
+ */ + static boolean isKeyword(Object key) { + if (!isString(key)) { + return false; + } + return "@base".equals(key) || "@context".equals(key) || "@container".equals(key) + || "@default".equals(key) || "@embed".equals(key) || "@explicit".equals(key) + || "@graph".equals(key) || "@id".equals(key) || "@index".equals(key) + || "@language".equals(key) || "@list".equals(key) || "@omitDefault".equals(key) + || "@reverse".equals(key) || "@preserve".equals(key) || "@set".equals(key) + || "@type".equals(key) || "@value".equals(key) || "@vocab".equals(key) + || "@requireAll".equals(key); + } + + public static Boolean deepCompare(Object v1, Object v2, Boolean listOrderMatters) { + if (v1 == null) { + return v2 == null; + } else if (v2 == null) { + return v1 == null; + } else if (v1 instanceof Map && v2 instanceof Map) { + final Map m1 = (Map) v1; + final Map m2 = (Map) v2; + if (m1.size() != m2.size()) { + return false; + } + for (final String key : m1.keySet()) { + if (!m2.containsKey(key) + || !deepCompare(m1.get(key), m2.get(key), listOrderMatters)) { + return false; + } + } + return true; + } else if (v1 instanceof List && v2 instanceof List) { + final List l1 = (List) v1; + final List l2 = (List) v2; + if (l1.size() != l2.size()) { + + return false; + } + // used to mark members of l2 that we have already matched to avoid + // matching the same item twice for lists that have duplicates + final boolean alreadyMatched[] = new boolean[l2.size()]; + for (int i = 0; i < l1.size(); i++) { + final Object o1 = l1.get(i); + Boolean gotmatch = false; + if (listOrderMatters) { + gotmatch = deepCompare(o1, l2.get(i), listOrderMatters); + } else { + for (int j = 0; j < l2.size(); j++) { + if (!alreadyMatched[j] && deepCompare(o1, l2.get(j), listOrderMatters)) { + alreadyMatched[j] = true; + gotmatch = true; + break; + } + } + } + if (!gotmatch) { + return false; + } + } + return true; + } else { + return v1.equals(v2); + } + } + + public static Boolean deepCompare(Object v1, Object v2) { + return deepCompare(v1, v2, false); + } + + public static boolean deepContains(List values, Object value) { + for (final Object item : values) { + if (deepCompare(item, value, false)) { + return true; + } + } + return false; + } + + static void mergeValue(Map obj, String key, Object value) { + if (obj == null) { + return; + } + List values = (List) obj.get(key); + if (values == null) { + values = new ArrayList(); + obj.put(key, values); + } + if ("@list".equals(key) + || (value instanceof Map && ((Map) value).containsKey("@list")) + || !deepContains(values, value)) { + values.add(value); + } + } + + static void laxMergeValue(Map obj, String key, Object value) { + if (obj == null) { + return; + } + List values = (List) obj.get(key); + if (values == null) { + values = new ArrayList(); + obj.put(key, values); + } + // if ("@list".equals(key) + // || (value instanceof Map && ((Map) + // value).containsKey("@list")) + // || !deepContains(values, value) + // ) { + values.add(value); + // } + } + + public static boolean isAbsoluteIri(String value) { + // TODO: this is a bit simplistic! + return value.contains(":"); + } + + /** + * Returns true if the given value is a subject with properties. + * + * @param v + * the value to check. + * + * @return true if the value is a subject with properties, false if not. + */ + static boolean isNode(Object v) { + // Note: A value is a subject if all of these hold true: + // 1. It is an Object. + // 2. It is not a @value, @set, or @list. + // 3. 
It has more than 1 key OR any existing key is not @id. + if (v instanceof Map && !(((Map) v).containsKey("@value") || ((Map) v).containsKey("@set") + || ((Map) v).containsKey("@list"))) { + return ((Map) v).size() > 1 || !((Map) v).containsKey("@id"); + } + return false; + } + + /** + * Returns true if the given value is a subject reference. + * + * @param v + * the value to check. + * + * @return true if the value is a subject reference, false if not. + */ + static boolean isNodeReference(Object v) { + // Note: A value is a subject reference if all of these hold true: + // 1. It is an Object. + // 2. It has a single key: @id. + return (v instanceof Map && ((Map) v).size() == 1 + && ((Map) v).containsKey("@id")); + } + + // TODO: fix this test + public static boolean isRelativeIri(String value) { + if (!(isKeyword(value) || isAbsoluteIri(value))) { + return true; + } + return false; + } + + /** + * Removes the @preserve keywords as the last step of the framing algorithm. + * + * @param ctx + * the active context used to compact the input. + * @param input + * the framed, compacted output. + * @param opts + * the compaction options used. + * + * @return the resulting output. + * @throws JsonLdError + */ + static Object removePreserve(Context ctx, Object input, JsonLdOptions opts) throws JsonLdError { + // recurse through arrays + if (isArray(input)) { + final List output = new ArrayList(); + for (final Object i : (List) input) { + final Object result = removePreserve(ctx, i, opts); + // drop nulls from arrays + if (result != null) { + output.add(result); + } + } + input = output; + } else if (isObject(input)) { + // remove @preserve + if (((Map) input).containsKey("@preserve")) { + if ("@null".equals(((Map) input).get("@preserve"))) { + return null; + } + return ((Map) input).get("@preserve"); + } + + // skip @values + if (isValue(input)) { + return input; + } + + // recurse through @lists + if (isList(input)) { + ((Map) input).put("@list", + removePreserve(ctx, ((Map) input).get("@list"), opts)); + return input; + } + + // recurse through properties + for (final String prop : ((Map) input).keySet()) { + Object result = removePreserve(ctx, ((Map) input).get(prop), opts); + final String container = ctx.getContainer(prop); + if (opts.getCompactArrays() && isArray(result) + && ((List) result).size() == 1 && container == null) { + result = ((List) result).get(0); + } + ((Map) input).put(prop, result); + } + } + return input; + } + + /** + * Removes the @id member of each node object where the member value is a + * blank node identifier which appears only once in any property value + * within input. + * + * @param input + * the framed output before compaction + */ + + static void pruneBlankNodes(final Object input) { + final Map toPrune = new HashMap<>(); + fillNodesToPrune(input, toPrune); + for (final String id : toPrune.keySet()) { + final Object node = toPrune.get(id); + if (node == null) { + continue; + } + ((Map) node).remove(JsonLdConsts.ID); + } + } + + /** + * Gets the objects on which we'll prune the blank node ID + * + * @param input + * the framed output before compaction + * @param toPrune + * the resulting object. 
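To illustrate the pruning described above: a blank-node @id that occurs only once anywhere in the framed output is dropped, while one referenced elsewhere is kept. The sketch below lives in the same package so the package-private helper is visible; the blank-node id and property are hypothetical:

    package com.github.jsonldjava.core;

    import java.util.*;

    public class PruneBlankNodesSketch {
        public static void main(String[] args) {
            Map<String, Object> node = new LinkedHashMap<>();
            node.put("@id", "_:b0");
            node.put("http://schema.org/name",
                    Arrays.asList(Collections.singletonMap("@value", "Alice")));

            List<Object> framedOutput = new ArrayList<>();
            framedOutput.add(node);

            JsonLdUtils.pruneBlankNodes(framedOutput);
            // "_:b0" appeared only once, so its @id member was removed
            System.out.println(node.containsKey("@id")); // prints false
        }
    }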
+ */ + static void fillNodesToPrune(Object input, final Map toPrune) { + // recurse through arrays + if (isArray(input)) { + for (final Object i : (List) input) { + fillNodesToPrune(i, toPrune); + } + } else if (isObject(input)) { + // skip @values + if (isValue(input)) { + return; + } + // recurse through @lists + if (isList(input)) { + fillNodesToPrune(((Map) input).get("@list"), toPrune); + return; + } + // recurse through properties + for (final String prop : new LinkedHashSet<>(((Map) input).keySet())) { + if (prop.equals(JsonLdConsts.ID)) { + final String id = (String) ((Map) input).get(JsonLdConsts.ID); + if (id.startsWith("_:")) { + // if toPrune contains the id already, it was already + // present somewhere else, + // so we just null the value + if (toPrune.containsKey(id)) { + toPrune.put(id, null); + } else { + // else we add the object as the value + toPrune.put(id, input); + } + } + } else { + fillNodesToPrune(((Map) input).get(prop), toPrune); + } + } + } else if (input instanceof String) { + // this is an id, as non-id values will have been discarded by the + // isValue() above + final String p = (String) input; + if (p.startsWith("_:")) { + // the id is outside of the context of an @id property, if we're + // in that case, + // then we're referencing a blank node id so this id should not + // be removed + toPrune.put(p, null); + } + } + } + + /** + * Compares two strings first based on length and then lexicographically. + * + * @param a + * the first string. + * @param b + * the second string. + * + * @return -1 if a < b, 1 if a > b, 0 if a == b. + */ + static int compareShortestLeast(String a, String b) { + if (a.length() < b.length()) { + return -1; + } else if (b.length() < a.length()) { + return 1; + } + return Integer.signum(a.compareTo(b)); + } + + /** + * Compares two JSON-LD values for equality. Two JSON-LD values will be + * considered equal if: + * + * 1. They are both primitives of the same type and value. 2. They are + * both @values with the same @value, @type, and @language, OR 3. They both + * have @ids they are the same. + * + * @param v1 + * the first value. + * @param v2 + * the second value. + * + * @return true if v1 and v2 are considered equal, false if not. + */ + static boolean compareValues(Object v1, Object v2) { + if (v1.equals(v2)) { + return true; + } + + if (isValue(v1) && isValue(v2) + && Obj.equals(((Map) v1).get("@value"), + ((Map) v2).get("@value")) + && Obj.equals(((Map) v1).get("@type"), + ((Map) v2).get("@type")) + && Obj.equals(((Map) v1).get("@language"), + ((Map) v2).get("@language")) + && Obj.equals(((Map) v1).get("@index"), + ((Map) v2).get("@index"))) { + return true; + } + + if ((v1 instanceof Map && ((Map) v1).containsKey("@id")) + && (v2 instanceof Map && ((Map) v2).containsKey("@id")) + && ((Map) v1).get("@id") + .equals(((Map) v2).get("@id"))) { + return true; + } + + return false; + } + + /** + * Returns true if the given value is a blank node. + * + * @param v + * the value to check. + * + * @return true if the value is a blank node, false if not. + */ + static boolean isBlankNode(Object v) { + // Note: A value is a blank node if all of these hold true: + // 1. It is an Object. + // 2. If it has an @id key its value begins with '_:'. + // 3. It has no keys OR is not a @value, @set, or @list. 
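+        // e.g. (illustrative) {"@id": "_:b0"} and {} are blank nodes, whereas
+        // {"@id": "http://example.org/x"} or a {"@value": ...} object is not.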
+ if (v instanceof Map) { + final Map map = (Map) v; + if (map.containsKey("@id")) { + return ((String) map.get("@id")).startsWith("_:"); + } else { + return map.isEmpty() || !map.containsKey("@value") || map.containsKey("@set") + || map.containsKey("@list"); + } + } + return false; + } + + static Object clone(Object value) {// throws + // CloneNotSupportedException { + Object rval = null; + if (value instanceof Cloneable) { + try { + rval = value.getClass().getMethod("clone").invoke(value); + } catch (final Exception e) { + rval = e; + } + } + if (rval == null || rval instanceof Exception) { + // the object wasn't cloneable, or an error occured + if (value == null || value instanceof String || value instanceof Number + || value instanceof Boolean) { + // strings numbers and booleans are immutable + rval = value; + } else { + // TODO: making this throw runtime exception so it doesn't have + // to be caught + // because simply it should never fail in the case of JSON-LD + // and means that + // the input JSON-LD is invalid + throw new RuntimeException(new CloneNotSupportedException( + (rval instanceof Exception ? ((Exception) rval).getMessage() : ""))); + } + } + return rval; + } + + /** + * Returns true if the given value is a JSON-LD Array + * + * @param v + * the value to check. + * @return + */ + static Boolean isArray(Object v) { + return (v instanceof List); + } + + /** + * Returns true if the given value is a JSON-LD List + * + * @param v + * the value to check. + * @return + */ + static Boolean isList(Object v) { + return (v instanceof Map && ((Map) v).containsKey("@list")); + } + + /** + * Returns true if the given value is a JSON-LD Object + * + * @param v + * the value to check. + * @return + */ + static Boolean isObject(Object v) { + return (v instanceof Map); + } + + /** + * Returns true if the given value is a JSON-LD value + * + * @param v + * the value to check. + * @return + */ + static Boolean isValue(Object v) { + return (v instanceof Map && ((Map) v).containsKey("@value")); + } + + /** + * Returns true if the given value is a JSON-LD string + * + * @param v + * the value to check. + * @return + */ + static Boolean isString(Object v) { + // TODO: should this return true for arrays of strings as well? + return (v instanceof String); + } +} \ No newline at end of file diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/NormalizeUtils.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/NormalizeUtils.java new file mode 100644 index 0000000..e368b41 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/NormalizeUtils.java @@ -0,0 +1,627 @@ +package com.github.jsonldjava.core; + +import com.github.jsonldjava.utils.Obj; + +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.*; + +import static com.github.jsonldjava.core.RDFDatasetUtils.parseNQuads; +import static com.github.jsonldjava.core.RDFDatasetUtils.toNQuad; + +/** + * Created by noah on 10/04/17. 
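+ *
+ * Utilities for normalizing an RDF dataset: blank nodes are assigned
+ * canonical names by hashing the quads they appear in (hashQuads) and,
+ * where hashes collide, the paths of adjacent blank nodes (hashPaths).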
+ */ + +public class NormalizeUtils { + + private final UniqueNamer namer; + private final Map bnodes; + private final List quads; + private final JsonLdOptions options; + + public NormalizeUtils(List quads, Map bnodes, UniqueNamer namer, + JsonLdOptions options) { + this.options = options; + this.quads = quads; + this.bnodes = bnodes; + this.namer = namer; + } + + // generates unique and duplicate hashes for bnodes + public Object hashBlankNodes(Collection unnamed_) throws JsonLdError { // Noah you need to check this out!!! + List unnamed = new ArrayList(unnamed_); + List nextUnnamed = new ArrayList(); + Map> duplicates = new LinkedHashMap>(); + Map unique = new LinkedHashMap(); + + // NOTE: not using the same structure as javascript here to avoid + // possible stack overflows + // hash quads for each unnamed bnode + for (int hui = 0; ; hui++) { + if (hui == unnamed.size()) { + // done, name blank nodes + Boolean named = false; + List hashes = new ArrayList(unique.keySet()); + Collections.sort(hashes); + for (final String hash : hashes) { + final String bnode = unique.get(hash); + namer.getName(bnode); + named = true; + } + + // continue to hash bnodes if a bnode was assigned a name + if (named) { + // this resets the initial variables, so it seems like it + // has to go on the stack + // but since this is the end of the function either way, it + // might not have to + // hashBlankNodes(unnamed); + hui = -1; + unnamed = nextUnnamed; + nextUnnamed = new ArrayList(); + duplicates = new LinkedHashMap>(); + unique = new LinkedHashMap(); + continue; + } + // name the duplicate hash bnods + else { + // names duplicate hash bnodes + // enumerate duplicate hash groups in sorted order + hashes = new ArrayList(duplicates.keySet()); + Collections.sort(hashes); + + // process each group + for (int pgi = 0; ; pgi++) { + if (pgi == hashes.size()) { + // done, create JSON-LD array + // return createArray(); + final List normalized = new ArrayList(); + + // Note: At this point all bnodes in the set of RDF + // quads have been + // assigned canonical names, which have been stored + // in the 'namer' object. + // Here each quad is updated by assigning each of + // its bnodes its new name + // via the 'namer' object + + // update bnode names in each quad and serialize + for (int cai = 0; cai < quads.size(); ++cai) { + final Map quad = (Map) quads + .get(cai); + for (final String attr : new String[]{"subject", "object", + "name"}) { + if (quad.containsKey(attr)) { + final Map qa = (Map) quad + .get(attr); + if (qa != null && "blank node".equals(qa.get("type")) + && ((String) qa.get("value")) + .indexOf("_:c14n") != 0) { + qa.put("value", + namer.getName((String) qa.get(("value")))); + } + } + } + normalized.add(toNQuad((RDFDataset.Quad) quad, + quad.containsKey("name") && quad.get("name") != null + ? 
(String) ((Map) quad.get("name")) + .get("value") + : null)); + } + + // sort normalized output + Collections.sort(normalized); + + // handle output format + if (options.format != null) { + if ("application/nquads".equals(options.format)) { + final StringBuilder rval = new StringBuilder(); + for (final String n : normalized) { + rval.append(n); + } + return rval.toString(); + } else { + throw new JsonLdError(JsonLdError.Error.UNKNOWN_FORMAT, + options.format); + } + } + final StringBuilder rval = new StringBuilder(); + for (final String n : normalized) { + rval.append(n); + } + return parseNQuads(rval.toString()); + } + + // name each group member + final List group = duplicates.get(hashes.get(pgi)); + final List results = new ArrayList(); + for (int n = 0; ; n++) { + if (n == group.size()) { + // name bnodes in hash order + Collections.sort(results, new Comparator() { + @Override + public int compare(HashResult a, HashResult b) { + final int res = a.hash.compareTo(b.hash); + return res; + } + }); + for (final HashResult r : results) { + // name all bnodes in path namer in + // key-entry order + // Note: key-order is preserved in + // javascript + for (final String key : r.pathNamer.existing().keySet()) { + namer.getName(key); + } + } + // processGroup(i+1); + break; + } else { + // skip already-named bnodes + final String bnode = group.get(n); + if (namer.isNamed(bnode)) { + continue; + } + + // hash bnode paths + final UniqueNamer pathNamer = new UniqueNamer("_:b"); + pathNamer.getName(bnode); + + final HashResult result = hashPaths(bnode, bnodes, namer, + pathNamer); + results.add(result); + } + } + } + } + } + + // hash unnamed bnode + final String bnode = unnamed.get(hui); + final String hash = hashQuads(bnode, bnodes, namer); + + // store hash as unique or a duplicate + if (duplicates.containsKey(hash)) { + duplicates.get(hash).add(bnode); + nextUnnamed.add(bnode); + } else if (unique.containsKey(hash)) { + final List tmp = new ArrayList(); + tmp.add(unique.get(hash)); + tmp.add(bnode); + duplicates.put(hash, tmp); + nextUnnamed.add(unique.get(hash)); + nextUnnamed.add(bnode); + unique.remove(hash); + } else { + unique.put(hash, bnode); + } + } + } + + public static List sortMapKeys(Map map) { // need to reverse list + List keyList = new ArrayList<>(map.keySet()); + Collections.sort(keyList); + + return keyList; + } + + public static List> sortMapList(List> mapList) { + return sortMapList(mapList, true); + } + + public static List> sortMapList(List> mapList, boolean recursion) { + List> sortedMapsList = new ArrayList<>(); + for(Map map: mapList) { + Map newMap = new HashMap<>(); + List keyList = new ArrayList<>(map.keySet()); + Collections.sort(keyList); + + for(String key: keyList) { + newMap.put(key, map.get(key)); + } + sortedMapsList.add(newMap); + } + if (recursion) { + return sortMapList(sortedMapsList, false); + } + return sortedMapsList; + + + } + + + + + + //1) Initialize nquads to an empty list. It will be used to store quads + // in N-Quads format. + + private static class HashResult { + String hash; + UniqueNamer pathNamer; + } + + /** + * Produces a hash for the paths of adjacent bnodes for a bnode, + * incorporating all information about its subgraph of bnodes. This method + * will recursively pick adjacent bnode permutations that produce the + * lexicographically-least 'path' serializations. + * + * @param id the ID of the bnode to hash paths for. + * @param bnodes the map of bnode quads. + * @param namer the canonical bnode namer. 
+ * @param pathNamer the namer used to assign names to adjacent bnodes. + */ + private static HashResult hashPaths(String id, Map bnodes, UniqueNamer namer, + UniqueNamer pathNamer) { + try { + // create SHA-1 digest + final MessageDigest md = MessageDigest.getInstance("SHA-1"); + + final Map> groups = new LinkedHashMap>(); + List groupHashes; + final List quads = (List) ((Map) bnodes.get(id)) + .get("quads"); + + for (int hpi = 0; ; hpi++) { + if (hpi == quads.size()) { + // done , hash groups + groupHashes = new ArrayList(groups.keySet()); + Collections.sort(groupHashes); + for (int hgi = 0; ; hgi++) { + if (hgi == groupHashes.size()) { + final HashResult res = new HashResult(); + res.hash = encodeHex(md.digest()); + res.pathNamer = pathNamer; + return res; + } + + // digest group hash + final String groupHash = groupHashes.get(hgi); + md.update(groupHash.getBytes("UTF-8")); + + // choose a path and namer from the permutations + String chosenPath = null; + UniqueNamer chosenNamer = null; + final Permutator permutator = new Permutator(groups.get(groupHash)); + while (true) { + Boolean contPermutation = false; + Boolean breakOut = false; + final List permutation = permutator.next(); + UniqueNamer pathNamerCopy = pathNamer.clone(); + + // build adjacent path + String path = ""; + final List recurse = new ArrayList(); + for (final String bnode : permutation) { + // use canonical name if available + if (namer.isNamed(bnode)) { + path += namer.getName(bnode); + } else { + // recurse if bnode isn't named in the path + // yet + if (!pathNamerCopy.isNamed(bnode)) { + recurse.add(bnode); + } + path += pathNamerCopy.getName(bnode); + } + + // skip permutation if path is already >= chosen + // path + if (chosenPath != null && path.length() >= chosenPath.length() + && path.compareTo(chosenPath) > 0) { + // return nextPermutation(true); + if (permutator.hasNext()) { + contPermutation = true; + } else { + // digest chosen path and update namer + md.update(chosenPath.getBytes("UTF-8")); + pathNamer = chosenNamer; + // hash the nextGroup + breakOut = true; + } + break; + } + } + + // if we should do the next permutation + if (contPermutation) { + continue; + } + // if we should stop processing this group + if (breakOut) { + break; + } + + // does the next recursion + for (int nrn = 0; ; nrn++) { + if (nrn == recurse.size()) { + // return nextPermutation(false); + if (chosenPath == null || path.compareTo(chosenPath) < 0) { + chosenPath = path; + chosenNamer = pathNamerCopy; + } + if (!permutator.hasNext()) { + // digest chosen path and update namer + md.update(chosenPath.getBytes("UTF-8")); + pathNamer = chosenNamer; + // hash the nextGroup + breakOut = true; + } + break; + } + + // do recursion + final String bnode = recurse.get(nrn); + final HashResult result = hashPaths(bnode, bnodes, namer, + pathNamerCopy); + path += pathNamerCopy.getName(bnode) + "<" + result.hash + ">"; + pathNamerCopy = result.pathNamer; + + // skip permutation if path is already >= chosen + // path + if (chosenPath != null && path.length() >= chosenPath.length() + && path.compareTo(chosenPath) > 0) { + // return nextPermutation(true); + if (!permutator.hasNext()) { + // digest chosen path and update namer + md.update(chosenPath.getBytes("UTF-8")); + pathNamer = chosenNamer; + // hash the nextGroup + breakOut = true; + } + break; + } + // do next recursion + } + + // if we should stop processing this group + if (breakOut) { + break; + } + } + } + } + + // get adjacent bnode + final Map quad = (Map) quads.get(hpi); + String bnode = 
getAdjacentBlankNodeName((Map) quad.get("subject"), + id); + String direction = null; + if (bnode != null) { + // normal property + direction = "p"; + } else { + bnode = getAdjacentBlankNodeName((Map) quad.get("object"), id); + if (bnode != null) { + // reverse property + direction = "r"; + } + } + + if (bnode != null) { + // get bnode name (try canonical, path, then hash) + String name; + if (namer.isNamed(bnode)) { + name = namer.getName(bnode); + } else if (pathNamer.isNamed(bnode)) { + name = pathNamer.getName(bnode); + } else { + name = hashQuads(bnode, bnodes, namer); + } + + // hash direction, property, end bnode name/hash + final MessageDigest md1 = MessageDigest.getInstance("SHA-1"); + // String toHash = direction + (String) ((Map) quad.get("predicate")).get("value") + name; + md1.update(direction.getBytes("UTF-8")); + md1.update(((String) ((Map) quad.get("predicate")).get("value")) + .getBytes("UTF-8")); + md1.update(name.getBytes("UTF-8")); + final String groupHash = encodeHex(md1.digest()); + if (groups.containsKey(groupHash)) { + groups.get(groupHash).add(bnode); + } else { + final List tmp = new ArrayList(); + tmp.add(bnode); + groups.put(groupHash, tmp); + } + } + } + } catch (final NoSuchAlgorithmException e) { + // TODO: i don't expect that SHA-1 is even NOT going to be + // available? + // look into this further + throw new RuntimeException(e); + } catch (final UnsupportedEncodingException e) { + // TODO: i don't expect that UTF-8 is ever not going to be available + // either + throw new RuntimeException(e); + } + } + + /** + * Hashes all of the quads about a blank node. + * + * @param id the ID of the bnode to hash quads for. + * @param bnodes the mapping of bnodes to quads. + * @param namer the canonical bnode namer. + * @return the new hash. + */ + private static String hashQuads(String id, Map bnodes, UniqueNamer namer) { + // return cached hash + if (((Map) bnodes.get(id)).containsKey("hash")) { + return (String) ((Map) bnodes.get(id)).get("hash"); + } + + // serialize all of bnode's quads + final List> quads = (List>) ((Map) bnodes + .get(id)).get("quads"); + final List nquads = new ArrayList(); + for (int i = 0; i < quads.size(); ++i) { + nquads.add(toNQuad((RDFDataset.Quad) quads.get(i), + quads.get(i).get("name") != null + ? 
(String) ((Map) quads.get(i).get("name")).get("value") + : null, + id)); + } + // sort serialized quads + Collections.sort(nquads); + // return hashed quads + final String hash = sha1hash(nquads); + ((Map) bnodes.get(id)).put("hash", hash); + return hash; + } + + /** + * A helper class to sha1 hash all the strings in a collection + * + * @param nquads + * @return + */ + private static String sha1hash(Collection nquads) { + try { + // create SHA-1 digest + final MessageDigest md = MessageDigest.getInstance("SHA-1"); + for (final String nquad : nquads) { + md.update(nquad.getBytes("UTF-8")); + } + return encodeHex(md.digest()); + } catch (final NoSuchAlgorithmException | UnsupportedEncodingException e) { + throw new RuntimeException(e); + } + } + + public static String sha256HashnQuads(List nquads) { + String stringToHash = ""; + for (String nquad : nquads) { + stringToHash += nquad; + } + return sha256Hash(stringToHash.getBytes()); + + } + + public static String sha256Hash(String string) { + return sha256Hash(string.getBytes()); + } + + public static String sha256Hash(byte[] bytes) { + return encodeHex(sha256Raw(bytes)); + } + + public static byte[] sha256Raw(byte[] bytes) { + byte[] hash = null; + try { + MessageDigest sha = MessageDigest.getInstance("SHA-256"); // may need to hex digest + sha.update(bytes); + hash = sha.digest(); + } catch (NoSuchAlgorithmException e) { + e.printStackTrace(); + } + return hash; + + } + + // TODO: this is something to optimize + public static String encodeHex(final byte[] data) { + String rval = ""; + for (final byte b : data) { + rval += String.format("%02x", b); + } + return rval; + } + + /** + * A helper function that gets the blank node name from an RDF quad node + * (subject or object). If the node is a blank node and its value does not + * match the given blank node ID, it will be returned. + * + * @param node the RDF quad node. + * @param id the ID of the blank node to look next to. + * @return the adjacent blank node name or null if none was found. + */ + private static String getAdjacentBlankNodeName(Map node, String id) { + return "blank node".equals(node.get("type")) + && (!node.containsKey("value") || !Obj.equals(node.get("value"), id)) + ? (String) node.get("value") : null; + } + + public static class Permutator { + + private final List list; + private boolean done; + private final Map left; + + public Permutator(List list) { + this.list = (List) JsonLdUtils.clone(list); + Collections.sort(this.list); + this.done = false; + this.left = new LinkedHashMap(); + for (final String i : this.list) { + this.left.put(i, true); + } + } + + /** + * Returns true if there is another permutation. + * + * @return true if there is another permutation, false if not. + */ + public boolean hasNext() { + return !this.done; + } + + /** + * Gets the next permutation. Call hasNext() to ensure there is another + * one first. + * + * @return the next permutation. 
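+         *         For example (illustrative), a Permutator built over
+         *         ["a", "b", "c"] yields all six orderings, one per call,
+         *         following the Steinhaus-Johnson-Trotter algorithm used below.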
+ */ + public List next() { + final List rval = (List) JsonLdUtils.clone(this.list); + + // Calculate the next permutation using Steinhaus-Johnson-Trotter + // permutation algoritm + + // get largest mobile element k + // (mobile: element is grater than the one it is looking at) + String k = null; + int pos = 0; + final int length = this.list.size(); + for (int i = 0; i < length; ++i) { + final String element = this.list.get(i); + final Boolean left = this.left.get(element); + if ((k == null || element.compareTo(k) > 0) + && ((left && i > 0 && element.compareTo(this.list.get(i - 1)) > 0) + || (!left && i < (length - 1) + && element.compareTo(this.list.get(i + 1)) > 0))) { + k = element; + pos = i; + } + } + + // no more permutations + if (k == null) { + this.done = true; + } else { + // swap k and the element it is looking at + final int swap = this.left.get(k) ? pos - 1 : pos + 1; + this.list.set(pos, this.list.get(swap)); + this.list.set(swap, k); + + // reverse the direction of all element larger than k + for (int i = 0; i < length; i++) { + if (this.list.get(i).compareTo(k) > 0) { + this.left.put(this.list.get(i), !this.left.get(this.list.get(i))); + } + } + } + + return rval; + } + + } + +} \ No newline at end of file diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/RDFDataset.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/RDFDataset.java new file mode 100644 index 0000000..1083594 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/RDFDataset.java @@ -0,0 +1,714 @@ +package com.github.jsonldjava.core; + + +import java.text.DecimalFormat; +import java.text.DecimalFormatSymbols; +import java.util.*; +import java.util.regex.Pattern; + +import static com.github.jsonldjava.core.JsonLdConsts.*; +import static com.github.jsonldjava.core.JsonLdUtils.*; +import static com.github.jsonldjava.utils.Obj.newMap; + +/** + * Created by noah on 09/04/17. + */ + +/** + * Starting to migrate away from using plain java Maps as the internal RDF + * dataset store. Currently each item just wraps a Map based on the old format + * so everything doesn't break. Will phase this out once everything is using the + * new format. + * + * @author Tristan + * + */ +public class RDFDataset extends LinkedHashMap { + private static final long serialVersionUID = 2796344994239879165L; + + private static final Pattern PATTERN_INTEGER = Pattern.compile("^[\\-+]?[0-9]+$"); + private static final Pattern PATTERN_DOUBLE = Pattern + .compile("^(\\+|-)?([0-9]+(\\.[0-9]*)?|\\.[0-9]+)([Ee](\\+|-)?[0-9]+)?$"); + + public static class Quad extends LinkedHashMap implements Comparable { + private static final long serialVersionUID = -7021918051975883082L; + + public Quad(final Map> component, final String graph) { + this(component.get("subject"), component.get("predicate"), component.get("object"), graph); + } + + public Quad(final Map subject, final Map predicate, + final Map object, final String graph) { + this(subject.get("type").equals("blank node") ? new BlankNode(subject) : new IRI(subject), + new IRI(predicate), object.get("type").equals("blank node") ? new BlankNode(object) : new IRI(object), graph); + } + + public Quad(final String subject, final String predicate, final String object, + final String graph) { + this(subject, predicate, + object.startsWith("_:") ? 
new BlankNode(object) : new IRI(object), graph); + }; + + public Quad(final String subject, final String predicate, final String value, + final String datatype, final String language, final String graph) { + this(subject, predicate, new Literal(value, datatype, language), graph); + } + + private Quad(final String subject, final String predicate, final Node object, + final String graph) { + this(subject.startsWith("_:") ? new BlankNode(subject) : new IRI(subject), + new IRI(predicate), object, graph); + } + + public Quad(final Node subject, final Node predicate, final Node object, + final String graph) { + super(); + put("subject", subject); + put("predicate", predicate); + put("object", object); + if (graph != null && !"@default".equals(graph)) { + // TODO: i'm not yet sure if this should be added or if the + // graph should only be represented by the keys in the dataset + put("name", graph.startsWith("_:") ? new BlankNode(graph) : new IRI(graph)); + } + } + + public Node getSubject() { + return (Node) get("subject"); + } + + public Node getPredicate() { + return (Node) get("predicate"); + } + + public Node getObject() { + return (Node) get("object"); + } + + public Node getGraph() { + return (Node) get("name"); + } + + @Override + public int compareTo(Quad o) { + if (o == null) { + return 1; + } + int rval = getGraph().compareTo(o.getGraph()); + if (rval != 0) { + return rval; + } + rval = getSubject().compareTo(o.getSubject()); + if (rval != 0) { + return rval; + } + rval = getPredicate().compareTo(o.getPredicate()); + if (rval != 0) { + return rval; + } + return getObject().compareTo(o.getObject()); + } + } + + public static abstract class Node extends LinkedHashMap + implements Comparable { + private static final long serialVersionUID = 1460990331795672793L; + + public abstract boolean isLiteral(); + + public abstract boolean isIRI(); + + public abstract boolean isBlankNode(); + + public String getValue() { + return (String) get("value"); + } + + public String getDatatype() { + return (String) get("datatype"); + } + + public String getLanguage() { + return (String) get("language"); + } + + @Override + public int compareTo(Node o) { + if (o == null) { + // valid nodes are > null nodes + return 1; + } + if (this.isIRI()) { + if (!o.isIRI()) { + // IRIs > everything + return 1; + } + } else if (this.isBlankNode()) { + if (o.isIRI()) { + // IRI > blank node + return -1; + } else if (o.isLiteral()) { + // blank node > literal + return 1; + } + } else if (this.isLiteral()) { + if (o.isIRI() || o.isBlankNode()) { + return -1; // literals < blanknode < IRI + } + } + // NOTE: Literal will also need to compare + // language and datatype + return this.getValue().compareTo(o.getValue()); + } + + /** + * Converts an RDF triple object to a JSON-LD object. + * + * @param useNativeTypes + * true to output native types, false not to. + * + * @return the JSON-LD object. + * @throws JsonLdError + */ + Map toObject(Boolean useNativeTypes) throws JsonLdError { + // If value is an an IRI or a blank node identifier, return a new + // JSON object consisting + // of a single member @id whose value is set to value. 
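+            // e.g. (illustrative) an IRI node whose value is "http://example.org/x"
+            // becomes {"@id": "http://example.org/x"}; a plain xsd:string literal
+            // "foo" becomes {"@value": "foo"}.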
+ if (isIRI() || isBlankNode()) { + return newMap("@id", getValue()); + } + + // convert literal object to JSON-LD + final Map rval = newMap("@value", getValue()); + + // add language + if (getLanguage() != null) { + rval.put("@language", getLanguage()); + } + // add datatype + else { + final String type = getDatatype(); + final String value = getValue(); + if (useNativeTypes) { + // use native datatypes for certain xsd types + if (XSD_STRING.equals(type)) { + // don't add xsd:string + } else if (XSD_BOOLEAN.equals(type)) { + if ("true".equals(value)) { + rval.put("@value", Boolean.TRUE); + } else if ("false".equals(value)) { + rval.put("@value", Boolean.FALSE); + } else { + // Else do not replace the value, and add the + // boolean type in + rval.put("@type", type); + } + } else if ( + // http://www.w3.org/TR/xmlschema11-2/#integer + (XSD_INTEGER.equals(type) && PATTERN_INTEGER.matcher(value).matches()) + // http://www.w3.org/TR/xmlschema11-2/#nt-doubleRep + || (XSD_DOUBLE.equals(type) + && PATTERN_DOUBLE.matcher(value).matches())) { + try { + final Double d = Double.parseDouble(value); + if (!Double.isNaN(d) && !Double.isInfinite(d)) { + if (XSD_INTEGER.equals(type)) { + final Integer i = d.intValue(); + if (i.toString().equals(value)) { + rval.put("@value", i); + } + } else if (XSD_DOUBLE.equals(type)) { + rval.put("@value", d); + } else { + throw new RuntimeException( + "This should never happen as we checked the type was either integer or double"); + } + } + } catch (final NumberFormatException e) { + // TODO: This should never happen since we match the + // value with regex! + throw new RuntimeException(e); + } + } + // do not add xsd:string type + else { + rval.put("@type", type); + } + } else if (!XSD_STRING.equals(type)) { + rval.put("@type", type); + } + } + return rval; + } + } + + public static class Literal extends Node { + private static final long serialVersionUID = 8124736271571220251L; + + public Literal(String value, String datatype, String language) { + super(); + put("type", "literal"); + put("value", value); + put("datatype", datatype != null ? datatype : XSD_STRING); + if (language != null) { + put("language", language); + } + } + + @Override + public boolean isLiteral() { + + return true; + } + + @Override + public boolean isIRI() { + return get("type").equals("IRI"); + } + + @Override + public boolean isBlankNode() { + return false; + } + + @SuppressWarnings("rawtypes") + private static int nullSafeCompare(Comparable a, Comparable b) { + if (a == null && b == null) { + return 0; + } + if (a == null) { + return 1; + } + if (b == null) { + return -1; + } + return a.compareTo(b); + } + + @Override + public int compareTo(Node o) { + // NOTE: this will also compare getValue() early! 
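+            // Node.compareTo orders literal < blank node < IRI and otherwise
+            // compares the lexical values, so only language/datatype remain
+            // to be checked here.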
+ int nodeCompare = super.compareTo(o); + if (nodeCompare != 0) { + // null, different type or different value + return nodeCompare; + } + if (this.getLanguage() != null || o.getLanguage() != null) { + // We'll ignore type-checking if either has language tag + // as language tagged literals should always have the type + // rdf:langString in RDF 1.1 + return nullSafeCompare(this.getLanguage(), o.getLanguage()); + } else { + return nullSafeCompare(this.getDatatype(), o.getDatatype()); + } + // NOTE: getValue() already compared by super.compareTo() + } + } + + public static class IRI extends Node { + private static final long serialVersionUID = 1540232072155490782L; + + public IRI(Map map) { + this.clear(); + this.putAll(map); + } + + public IRI(String iri) { + super(); + put("type", "IRI"); + put("value", iri); + } + + @Override + public boolean isLiteral() { + return false; + } + + @Override + public boolean isIRI() { + return true; + } + + @Override + public boolean isBlankNode() { + return false; + } + } + + public static class BlankNode extends Node { + private static final long serialVersionUID = -2842402820440697318L; + + public BlankNode(Map map) { + this.clear(); + this.putAll(map); + } + public BlankNode(String attribute) { + super(); + put("type", "blank node"); + put("value", attribute); + } + + @Override + public boolean isLiteral() { + return false; + } + + @Override + public boolean isIRI() { + return false; + } + + @Override + public boolean isBlankNode() { + return true; + } + } + + private static final Node first = new IRI(RDF_FIRST); + private static final Node rest = new IRI(RDF_REST); + private static final Node nil = new IRI(RDF_NIL); + + private final Map context; + + // private UniqueNamer namer; + private JsonLdApi api; + + public RDFDataset() { + super(); + put("@default", new ArrayList()); + context = new LinkedHashMap(); + // put("@context", context); + } + + /* + * public RDFDataset(String blankNodePrefix) { this(new + * UniqueNamer(blankNodePrefix)); } + * + * public RDFDataset(UniqueNamer namer) { this(); this.namer = namer; } + */ + public RDFDataset(JsonLdApi jsonLdApi) { + this(); + this.api = jsonLdApi; + } + + public void setNamespace(String ns, String prefix) { + context.put(ns, prefix); + } + + public String getNamespace(String ns) { + return context.get(ns); + } + + /** + * clears all the namespaces in this dataset + */ + public void clearNamespaces() { + context.clear(); + } + + public Map getNamespaces() { + return context; + } + + /** + * Returns a valid context containing any namespaces set + * + * @return The context map + */ + public Map getContext() { + final Map rval = newMap(); + rval.putAll(context); + // replace "" with "@vocab" + if (rval.containsKey("")) { + rval.put("@vocab", rval.remove("")); + } + return rval; + } + + /** + * parses a context object and sets any namespaces found within it + * + * @param contextLike + * The context to parse + * @throws JsonLdError + * If the context can't be parsed + */ + public void parseContext(Object contextLike) throws JsonLdError { + Context context; + if (api != null) { + context = new Context(api.opts); + } else { + context = new Context(); + } + // Context will do our recursive parsing and initial IRI resolution + context = context.parse(contextLike); + // And then leak to us the potential 'prefixes' + final Map prefixes = context.getPrefixes(true); + + for (final String key : prefixes.keySet()) { + final String val = prefixes.get(key); + if ("@vocab".equals(key)) { + if (val == null || 
isString(val)) { + setNamespace("", val); + } else { + } + } else if (!isKeyword(key)) { + setNamespace(key, val); + // TODO: should we make sure val is a valid URI prefix (i.e. it + // ends with /# or ?) + // or is it ok that full URIs for terms are used? + } + } + } + + /** + * Adds a triple to the @default graph of this dataset + * + * @param subject + * the subject for the triple + * @param predicate + * the predicate for the triple + * @param value + * the value of the literal object for the triple + * @param datatype + * the datatype of the literal object for the triple (null values + * will default to xsd:string) + * @param language + * the language of the literal object for the triple (or null) + */ + public void addTriple(final String subject, final String predicate, final String value, + final String datatype, final String language) { + addQuad(subject, predicate, value, datatype, language, "@default"); + } + + /** + * Adds a triple to the specified graph of this dataset + * + * @param s + * the subject for the triple + * @param p + * the predicate for the triple + * @param value + * the value of the literal object for the triple + * @param datatype + * the datatype of the literal object for the triple (null values + * will default to xsd:string) + * @param graph + * the graph to add this triple to + * @param language + * the language of the literal object for the triple (or null) + */ + public void addQuad(final String s, final String p, final String value, final String datatype, + final String language, String graph) { + if (graph == null) { + graph = "@default"; + } + if (!containsKey(graph)) { + put(graph, new ArrayList()); + } + ((ArrayList) get(graph)).add(new Quad(s, p, value, datatype, language, graph)); + } + + /** + * Adds a triple to the default graph of this dataset + * + * @param subject + * the subject for the triple + * @param predicate + * the predicate for the triple + * @param object + * the object for the triple + */ + public void addTriple(final String subject, final String predicate, final String object) { + addQuad(subject, predicate, object, "@default"); + } + + /** + * Adds a triple to the specified graph of this dataset + * + * @param subject + * the subject for the triple + * @param predicate + * the predicate for the triple + * @param object + * the object for the triple + * @param graph + * the graph to add this triple to + */ + public void addQuad(final String subject, final String predicate, final String object, + String graph) { + if (graph == null) { + graph = "@default"; + } + if (!containsKey(graph)) { + put(graph, new ArrayList()); + } + ((ArrayList) get(graph)).add(new Quad(subject, predicate, object, graph)); + } + + /** + * Creates an array of RDF triples for the given graph. + * + * @param graphName + * The graph URI + * @param graph + * the graph to create RDF triples for. 
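+     *            (keyword properties are skipped, @type is emitted as rdf:type,
+     *            and @list values are expanded into rdf:first/rdf:rest chains)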
+ */ + void graphToRDF(String graphName, Map graph) { + // 4.2) + final List triples = new ArrayList(); + // 4.3) + final List subjects = new ArrayList(graph.keySet()); + // Collections.sort(subjects); + for (final String id : subjects) { + if (JsonLdUtils.isRelativeIri(id)) { + continue; + } + final Map node = (Map) graph.get(id); + final List properties = new ArrayList(node.keySet()); + Collections.sort(properties); + for (String property : properties) { + final List values; + // 4.3.2.1) + if ("@type".equals(property)) { + values = (List) node.get("@type"); + property = RDF_TYPE; + } + // 4.3.2.2) + else if (isKeyword(property)) { + continue; + } + // 4.3.2.3) + else if (property.startsWith("_:") && !api.opts.getProduceGeneralizedRdf()) { + continue; + } + // 4.3.2.4) + else if (JsonLdUtils.isRelativeIri(property)) { + continue; + } else { + values = (List) node.get(property); + } + + Node subject; + if (id.indexOf("_:") == 0) { + // NOTE: don't rename, just set it as a blank node + subject = new BlankNode(id); + } else { + subject = new IRI(id); + } + + // RDF predicates + Node predicate; + if (property.startsWith("_:")) { + predicate = new BlankNode(property); + } else { + predicate = new IRI(property); + } + + for (final Object item : values) { + // convert @list to triples + if (isList(item)) { + final List list = (List) ((Map) item) + .get("@list"); + Node last = null; + Node firstBNode = nil; + if (!list.isEmpty()) { + last = objectToRDF(list.get(list.size() - 1)); + firstBNode = new BlankNode(api.generateBlankNodeIdentifier()); + } + triples.add(new Quad(subject, predicate, firstBNode, graphName)); + for (int i = 0; i < list.size() - 1; i++) { + final Node object = objectToRDF(list.get(i)); + triples.add(new Quad(firstBNode, first, object, graphName)); + final Node restBNode = new BlankNode(api.generateBlankNodeIdentifier()); + triples.add(new Quad(firstBNode, rest, restBNode, graphName)); + firstBNode = restBNode; + } + if (last != null) { + triples.add(new Quad(firstBNode, first, last, graphName)); + triples.add(new Quad(firstBNode, rest, nil, graphName)); + } + } + // convert value or node object to triple + else { + final Node object = objectToRDF(item); + if (object != null) { + triples.add(new Quad(subject, predicate, object, graphName)); + } + } + } + } + } + put(graphName, triples); + } + + /** + * Converts a JSON-LD value object to an RDF literal or a JSON-LD string or + * node object to an RDF resource. + * + * @param item + * the JSON-LD value or node object. + * @return the RDF literal or RDF resource. + */ + private Node objectToRDF(Object item) { + // convert value object to RDF + if (isValue(item)) { + final Object value = ((Map) item).get("@value"); + final Object datatype = ((Map) item).get("@type"); + + // convert to XSD datatypes as appropriate + if (value instanceof Boolean || value instanceof Number) { + // convert to XSD datatype + if (value instanceof Boolean) { + return new Literal(value.toString(), + datatype == null ? XSD_BOOLEAN : (String) datatype, null); + } else if (value instanceof Double || value instanceof Float + || XSD_DOUBLE.equals(datatype)) { + // canonical double representation + final DecimalFormat df = new DecimalFormat("0.0###############E0"); + df.setDecimalFormatSymbols(DecimalFormatSymbols.getInstance(Locale.US)); + return new Literal(df.format(value), + datatype == null ? XSD_DOUBLE : (String) datatype, null); + } else { + final DecimalFormat df = new DecimalFormat("0"); + return new Literal(df.format(value), + datatype == null ? 
XSD_INTEGER : (String) datatype, null); + } + } else if (((Map) item).containsKey("@language")) { + return new Literal((String) value, + datatype == null ? RDF_LANGSTRING : (String) datatype, + (String) ((Map) item).get("@language")); + } else { + return new Literal((String) value, + datatype == null ? XSD_STRING : (String) datatype, null); + } + } + // convert string/node object to RDF + else { + final String id; + if (isObject(item)) { + id = (String) ((Map) item).get("@id"); + if (JsonLdUtils.isRelativeIri(id)) { + return null; + } + } else { + id = (String) item; + } + if (id.indexOf("_:") == 0) { + // NOTE: once again no need to rename existing blank nodes + return new BlankNode(id); + } else { + return new IRI(id); + } + } + } + + public Set graphNames() { + // TODO Auto-generated method stub + return keySet(); + } + + public List getQuads(String graphName) { + return (List) get(graphName); + } +} \ No newline at end of file diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/RDFDatasetUtils.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/RDFDatasetUtils.java new file mode 100644 index 0000000..0913338 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/RDFDatasetUtils.java @@ -0,0 +1,373 @@ +package com.github.jsonldjava.core; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static com.github.jsonldjava.core.JsonLdConsts.RDF_LANGSTRING; +import static com.github.jsonldjava.core.JsonLdConsts.XSD_STRING; +import static com.github.jsonldjava.core.Regex.HEX; + +public class RDFDatasetUtils { + + public static String toNQuads(RDFDataset dataset) { + final StringBuilder output = new StringBuilder(256); + toNQuads(dataset, output); + return output.toString(); + } + + public static void toNQuads(RDFDataset dataset, StringBuilder output) { + final List quads = new ArrayList(); + for (String graphName : dataset.graphNames()) { + final List triples = dataset.getQuads(graphName); + if ("@default".equals(graphName)) { + graphName = null; + } + for (final RDFDataset.Quad triple : triples) { + quads.add(toNQuad(triple, graphName)); + } + } + Collections.sort(quads); + for (final String quad : quads) { + output.append(quad); + } + } + + static String toNQuad(RDFDataset.Quad triple, String graphName, String bnode) { + final StringBuilder output = new StringBuilder(256); + toNQuad(triple, graphName, bnode, output); + return output.toString(); + } + + static void toNQuad(RDFDataset.Quad triple, String graphName, String bnode, + StringBuilder output) { + final RDFDataset.Node s = triple.getSubject(); + final RDFDataset.Node p = triple.getPredicate(); + final RDFDataset.Node o = triple.getObject(); + + // subject is an IRI or bnode + if (s.isIRI()) { + output.append("<"); + escape(s.getValue(), output); + output.append(">"); + } + // normalization mode + else if (bnode != null) { + output.append(bnode.equals(s.getValue()) ? "_:a" : "_:z"); + } + // normal mode + else { + output.append(s.getValue()); + } + + if (p.isIRI()) { + output.append(" <"); + escape(p.getValue(), output); + output.append("> "); + } + // otherwise it must be a bnode (TODO: can we only allow this if the + // flag is set in options?) 
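+        // (blank node predicates only arise for generalized RDF datasets)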
+ else { + output.append(" "); + escape(p.getValue(), output); + output.append(" "); + } + + // object is IRI, bnode or literal + if (o.isIRI()) { + output.append("<"); + escape(o.getValue(), output); + output.append(">"); + } else if (o.isBlankNode()) { + // normalization mode + if (bnode != null) { + output.append(bnode.equals(o.getValue()) ? "_:a" : "_:z"); + } + // normal mode + else { + output.append(o.getValue()); + } + } else { + output.append("\""); + escape(o.getValue(), output); + output.append("\""); + if (RDF_LANGSTRING.equals(o.getDatatype())) { + output.append("@").append(o.getLanguage()); + } else if (!XSD_STRING.equals(o.getDatatype())) { + output.append("^^<"); + escape(o.getDatatype(), output); + output.append(">"); + } + } + + // graph + if (graphName != null) { + if (graphName.indexOf("_:") != 0) { + output.append(" <"); + escape(graphName, output); + output.append(">"); + } else if (bnode != null) { + output.append(" _:g"); + } else { + output.append(" ").append(graphName); + } + } + + output.append(" .\n"); + } + + static String toNQuad(RDFDataset.Quad triple, String graphName) { + return toNQuad(triple, graphName, null); + } + + final private static Pattern UCHAR_MATCHED = Pattern + .compile("\\u005C(?:([tbnrf\\\"'])|(?:u(" + HEX + "{4}))|(?:U(" + HEX + "{8})))"); + + public static String unescape(String str) { + String rval = str; + if (str != null) { + final Matcher m = UCHAR_MATCHED.matcher(str); + while (m.find()) { + String uni = m.group(0); + if (m.group(1) == null) { + final String hex = m.group(2) != null ? m.group(2) : m.group(3); + final int v = Integer.parseInt(hex, 16);// hex = + // hex.replaceAll("^(?:00)+", + // ""); + if (v > 0xFFFF) { + // deal with UTF-32 + // Integer v = Integer.parseInt(hex, 16); + final int vt = v - 0x10000; + final int vh = vt >> 10; + final int v1 = vt & 0x3FF; + final int w1 = 0xD800 + vh; + final int w2 = 0xDC00 + v1; + + final StringBuilder b = new StringBuilder(); + b.appendCodePoint(w1); + b.appendCodePoint(w2); + uni = b.toString(); + } else { + uni = Character.toString((char) v); + } + } else { + final char c = m.group(1).charAt(0); + switch (c) { + case 'b': + uni = "\b"; + break; + case 'n': + uni = "\n"; + break; + case 't': + uni = "\t"; + break; + case 'f': + uni = "\f"; + break; + case 'r': + uni = "\r"; + break; + case '\'': + uni = "'"; + break; + case '\"': + uni = "\""; + break; + case '\\': + uni = "\\"; + break; + default: + // do nothing + continue; + } + } + final String pat = Pattern.quote(m.group(0)); + // final String x = Integer.toHexString(uni.charAt(0)); + rval = rval.replaceAll(pat, uni); + } + } + return rval; + } + + /** + * Escapes the given string according to the N-Quads escape rules + * + * @param str + * The string to escape + * @param rval + * The {@link StringBuilder} to append to. 
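+     *
+     *            For example (illustrative), a tab is appended as \t, a double
+     *            quote as \" and non-printable characters as four- or
+     *            eight-digit Unicode escapes.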
+ */ + public static void escape(String str, StringBuilder rval) { + for (int i = 0; i < str.length(); i++) { + final char hi = str.charAt(i); + if (hi <= 0x8 || hi == 0xB || hi == 0xC || (hi >= 0xE && hi <= 0x1F) + || (hi >= 0x7F && hi <= 0xA0) || // 0xA0 is end of + // non-printable latin-1 + // supplement + // characters + ((hi >= 0x24F // 0x24F is the end of latin extensions + && !Character.isHighSurrogate(hi)) + // TODO: there's probably a lot of other characters that + // shouldn't be escaped that + // fall outside these ranges, this is one example from the + // json-ld tests + )) { + rval.append(String.format("\\u%04x", (int) hi)); + } else if (Character.isHighSurrogate(hi)) { + final char lo = str.charAt(++i); + final int c = (hi << 10) + lo + (0x10000 - (0xD800 << 10) - 0xDC00); + rval.append(String.format("\\U%08x", c)); + } else { + switch (hi) { + case '\b': + rval.append("\\b"); + break; + case '\n': + rval.append("\\n"); + break; + case '\t': + rval.append("\\t"); + break; + case '\f': + rval.append("\\f"); + break; + case '\r': + rval.append("\\r"); + break; + // case '\'': + // rval += "\\'"; + // break; + case '\"': + rval.append("\\\""); + // rval += "\\u0022"; + break; + case '\\': + rval.append("\\\\"); + break; + default: + // just put the char as is + rval.append(hi); + break; + } + } + } + // return rval; + } + + private static class Regex { + // define partial regexes + // final public static Pattern IRI = + // Pattern.compile("(?:<([^:]+:[^>]*)>)"); + final public static Pattern IRI = Pattern.compile("(?:<([^>]*)>)"); + final public static Pattern BNODE = Pattern.compile("(_:(?:[A-Za-z][A-Za-z0-9]*))"); + final public static Pattern PLAIN = Pattern.compile("\"([^\"\\\\]*(?:\\\\.[^\"\\\\]*)*)\""); + final public static Pattern DATATYPE = Pattern.compile("(?:\\^\\^" + IRI + ")"); + final public static Pattern LANGUAGE = Pattern.compile("(?:@([a-z]+(?:-[a-zA-Z0-9]+)*))"); + final public static Pattern LITERAL = Pattern + .compile("(?:" + PLAIN + "(?:" + DATATYPE + "|" + LANGUAGE + ")?)"); + final public static Pattern WS = Pattern.compile("[ \\t]+"); + final public static Pattern WSO = Pattern.compile("[ \\t]*"); + final public static Pattern EOLN = Pattern.compile("(?:\r\n)|(?:\n)|(?:\r)"); + final public static Pattern EMPTY = Pattern.compile("^" + WSO + "$"); + + // define quad part regexes + final public static Pattern SUBJECT = Pattern.compile("(?:" + IRI + "|" + BNODE + ")" + WS); + final public static Pattern PROPERTY = Pattern.compile(IRI.pattern() + WS.pattern()); + final public static Pattern OBJECT = Pattern + .compile("(?:" + IRI + "|" + BNODE + "|" + LITERAL + ")" + WSO); + final public static Pattern GRAPH = Pattern + .compile("(?:\\.|(?:(?:" + IRI + "|" + BNODE + ")" + WSO + "\\.))"); + + // full quad regex + final public static Pattern QUAD = Pattern + .compile("^" + WSO + SUBJECT + PROPERTY + OBJECT + GRAPH + WSO + "$"); + } + + /** + * Parses RDF in the form of N-Quads. + * + * @param input + * the N-Quads input to parse. + * + * @return an RDF dataset. + * @throws JsonLdError + * If there was an error parsing the N-Quads document. 
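+     *             (for example, a line that does not match the N-Quads grammar
+     *             is rejected and reported with its line number)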
+ */ + public static RDFDataset parseNQuads(String input) throws JsonLdError { + // build RDF dataset + final RDFDataset dataset = new RDFDataset(); + + // split N-Quad input into lines + final String[] lines = Regex.EOLN.split(input); + int lineNumber = 0; + for (final String line : lines) { + lineNumber++; + + // skip empty lines + if (Regex.EMPTY.matcher(line).matches()) { + continue; + } + + // parse quad + final Matcher match = Regex.QUAD.matcher(line); + if (!match.matches()) { + throw new JsonLdError(JsonLdError.Error.SYNTAX_ERROR, + "Error while parsing N-Quads; invalid quad. line:" + lineNumber); + } + + // get subject + RDFDataset.Node subject; + if (match.group(1) != null) { + subject = new RDFDataset.IRI(unescape(match.group(1))); + } else { + subject = new RDFDataset.BlankNode(unescape(match.group(2))); + } + + // get predicate + final RDFDataset.Node predicate = new RDFDataset.IRI(unescape(match.group(3))); + + // get object + RDFDataset.Node object; + if (match.group(4) != null) { + object = new RDFDataset.IRI(unescape(match.group(4))); + } else if (match.group(5) != null) { + object = new RDFDataset.BlankNode(unescape(match.group(5))); + } else { + final String language = unescape(match.group(8)); + final String datatype = match.group(7) != null ? unescape(match.group(7)) + : match.group(8) != null ? RDF_LANGSTRING : XSD_STRING; + final String unescaped = unescape(match.group(6)); + object = new RDFDataset.Literal(unescaped, datatype, language); + } + + // get graph name ('@default' is used for the default graph) + String name = "@default"; + if (match.group(9) != null) { + name = unescape(match.group(9)); + } else if (match.group(10) != null) { + name = unescape(match.group(10)); + } + + final RDFDataset.Quad triple = new RDFDataset.Quad(subject, predicate, object, name); + + // initialise graph in dataset + if (!dataset.containsKey(name)) { + final List tmp = new ArrayList(); + tmp.add(triple); + dataset.put(name, tmp); + } + // add triple if unique to its graph + else { + final List triples = (List) dataset.get(name); + if (!triples.contains(triple)) { + triples.add(triple); + } + } + } + + return dataset; + } +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/RDFParser.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/RDFParser.java new file mode 100644 index 0000000..db4ec69 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/RDFParser.java @@ -0,0 +1,47 @@ +package com.github.jsonldjava.core; + +/** + * Interface for parsing RDF into the RDF Dataset objects to be used by + * JSONLD.fromRDF + * + * @author Tristan + * + */ +public interface RDFParser { + + /** + * Parse the input into the internal RDF Dataset format The format is a Map + * with the following structure: { GRAPH_1: [ TRIPLE_1, TRIPLE_2, ..., + * TRIPLE_N ], GRAPH_2: [ TRIPLE_1, TRIPLE_2, ..., TRIPLE_N ], ... GRAPH_N: + * [ TRIPLE_1, TRIPLE_2, ..., TRIPLE_N ] } + * + * GRAPH: Must be the graph name/IRI. if no graph is present for a triple, + * add it to the "@default" graph TRIPLE: Must be a map with the following + * structure: { "subject" : SUBJECT "predicate" : PREDICATE "object" : + * OBJECT } + * + * Each of the values in the triple map must also be a map with the + * following key-value pairs: "value" : The value of the node. "subject" can + * be an IRI or blank node id. 
"predicate" should only ever be an IRI + * "object" can be and IRI or blank node id, or a literal value (represented + * as a string) "type" : "IRI" if the value is an IRI or "blank node" if the + * value is a blank node. "object" can also be "literal" in the case of + * literals. The value of "object" can also contain the following optional + * key-value pairs: "language" : the language value of a string literal + * "datatype" : the datatype of the literal. (if not set will default to + * XSD:string, if set to null, null will be used). + * + * The RDFDatasetUtils class has the following helper methods to make + * generating this format easier: result = getInitialRDFDatasetResult(); + * triple = generateTriple(s,p,o); triple = + * generateTriple(s,p,value,datatype,language); + * addTripleToRDFDatasetResult(result, graphName, triple); + * + * @param input + * The RDF library specific input to parse + * @return The input parsed using the internal RDF Dataset format + * @throws JsonLdError + * If there was an error parsing the input + */ + public RDFDataset parse(Object input) throws JsonLdError; +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/Regex.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/Regex.java new file mode 100644 index 0000000..33f4375 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/Regex.java @@ -0,0 +1,56 @@ +package com.github.jsonldjava.core; + +import java.util.regex.Pattern; + +class Regex { + final public static Pattern TRICKY_UTF_CHARS = Pattern.compile( + // ("1.7".equals(System.getProperty("java.specification.version")) ? + // "[\\x{10000}-\\x{EFFFF}]" : + "[\uD800\uDC00-\uDB7F\uDFFF]" // this seems to work with jdk1.6 + ); + // for ttl + final public static Pattern PN_CHARS_BASE = Pattern.compile( + "[a-zA-Z]|[\\u00C0-\\u00D6]|[\\u00D8-\\u00F6]|[\\u00F8-\\u02FF]|[\\u0370-\\u037D]|[\\u037F-\\u1FFF]|" + + "[\\u200C-\\u200D]|[\\u2070-\\u218F]|[\\u2C00-\\u2FEF]|[\\u3001-\\uD7FF]|[\\uF900-\\uFDCF]|[\\uFDF0-\\uFFFD]|" + + TRICKY_UTF_CHARS); + final public static Pattern PN_CHARS_U = Pattern.compile(PN_CHARS_BASE + "|[_]"); + final public static Pattern PN_CHARS = Pattern + .compile(PN_CHARS_U + "|[-0-9]|[\\u00B7]|[\\u0300-\\u036F]|[\\u203F-\\u2040]"); + final public static Pattern PN_PREFIX = Pattern.compile( + "(?:(?:" + PN_CHARS_BASE + ")(?:(?:" + PN_CHARS + "|[\\.])*(?:" + PN_CHARS + "))?)"); + final public static Pattern HEX = Pattern.compile("[0-9A-Fa-f]"); + final public static Pattern PN_LOCAL_ESC = Pattern + .compile("[\\\\][_~\\.\\-!$&'\\(\\)*+,;=/?#@%]"); + final public static Pattern PERCENT = Pattern.compile("%" + HEX + HEX); + final public static Pattern PLX = Pattern.compile(PERCENT + "|" + PN_LOCAL_ESC); + final public static Pattern PN_LOCAL = Pattern + .compile("((?:" + PN_CHARS_U + "|[:]|[0-9]|" + PLX + ")(?:(?:" + PN_CHARS + "|[.]|[:]|" + + PLX + ")*(?:" + PN_CHARS + "|[:]|" + PLX + "))?)"); + final public static Pattern PNAME_NS = Pattern.compile("((?:" + PN_PREFIX + ")?):"); + final public static Pattern PNAME_LN = Pattern.compile("" + PNAME_NS + PN_LOCAL); + final public static Pattern UCHAR = Pattern.compile("\\u005Cu" + HEX + HEX + HEX + HEX + + "|\\u005CU" + HEX + HEX + HEX + HEX + HEX + HEX + HEX + HEX); + final public static Pattern ECHAR = Pattern.compile("\\u005C[tbnrf\\u005C\"']"); + final public static Pattern IRIREF = Pattern + .compile("(?:<((?:[^\\x00-\\x20<>\"{}|\\^`\\\\]|" + UCHAR + ")*)>)"); + final public static Pattern BLANK_NODE_LABEL = Pattern.compile("(?:_:((?:" + PN_CHARS_U + 
+ "|[0-9])(?:(?:" + PN_CHARS + "|[\\.])*(?:" + PN_CHARS + "))?))"); + final public static Pattern WS = Pattern.compile("[ \t\r\n]"); + final public static Pattern WS_0_N = Pattern.compile(WS + "*"); + final public static Pattern WS_0_1 = Pattern.compile(WS + "?"); + final public static Pattern WS_1_N = Pattern.compile(WS + "+"); + final public static Pattern STRING_LITERAL_QUOTE = Pattern.compile( + "\"(?:[^\\u0022\\u005C\\u000A\\u000D]|(?:" + ECHAR + ")|(?:" + UCHAR + "))*\""); + final public static Pattern STRING_LITERAL_SINGLE_QUOTE = Pattern + .compile("'(?:[^\\u0027\\u005C\\u000A\\u000D]|(?:" + ECHAR + ")|(?:" + UCHAR + "))*'"); + final public static Pattern STRING_LITERAL_LONG_SINGLE_QUOTE = Pattern + .compile("'''(?:(?:(?:'|'')?[^'\\\\])|" + ECHAR + "|" + UCHAR + ")*'''"); + final public static Pattern STRING_LITERAL_LONG_QUOTE = Pattern + .compile("\"\"\"(?:(?:(?:\"|\"\")?[^\\\"\\\\])|" + ECHAR + "|" + UCHAR + ")*\"\"\""); + final public static Pattern LANGTAG = Pattern.compile("(?:@([a-zA-Z]+(?:-[a-zA-Z0-9]+)*))"); + final public static Pattern INTEGER = Pattern.compile("[+-]?[0-9]+"); + final public static Pattern DECIMAL = Pattern.compile("[+-]?[0-9]*\\.[0-9]+"); + final public static Pattern EXPONENT = Pattern.compile("[eE][+-]?[0-9]+"); + final public static Pattern DOUBLE = Pattern.compile("[+-]?(?:(?:[0-9]+\\.[0-9]*" + EXPONENT + + ")|(?:\\.[0-9]+" + EXPONENT + ")|(?:[0-9]+" + EXPONENT + "))"); +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/RemoteDocument.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/RemoteDocument.java new file mode 100644 index 0000000..5cd59e3 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/RemoteDocument.java @@ -0,0 +1,84 @@ +package com.github.jsonldjava.core; + +/** + * Encapsulates a URL along with the parsed resource matching the URL. + * + * @author Tristan King + */ +public class RemoteDocument { +/* + //private final String documentUrl; + //private final Object document; + + *//** + * Create a new RemoteDocument with the URL and the parsed resource for the + * document. + * + * @param url + * The URL + * @param document + * The parsed resource for the document + *//* + *//*public RemoteDocument(String url, Object document) { + this.documentUrl = url; + this.document = document; + }*//* + + *//** + * Get the URL for this document. + * + * @return The URL for this document, as a String + *//* + public String getDocumentUrl() { + return documentUrl; + } + + *//** + * Get the parsed resource for this document. 
+ * + * @return The parsed resource for this document + *//* + public Object getDocument() { + return document; + }*/ + + + public String getDocumentUrl() { + return documentUrl; + } + + public void setDocumentUrl(String documentUrl) { + this.documentUrl = documentUrl; + } + + public Object getDocument() { + return document; + } + + public void setDocument(Object document) { + this.document = document; + } + + public String getContextUrl() { + return contextUrl; + } + + public void setContextUrl(String contextUrl) { + this.contextUrl = contextUrl; + } + + String documentUrl; + Object document; + String contextUrl; + + public RemoteDocument(String url, Object document) { + this(url, document, null); + } + + public RemoteDocument(String url, Object document, String context) { + this.documentUrl = url; + this.document = document; + this.contextUrl = context; + } + +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/UniqueNamer.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/UniqueNamer.java new file mode 100644 index 0000000..0d0aa76 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/UniqueNamer.java @@ -0,0 +1,72 @@ +package com.github.jsonldjava.core; + +import java.util.LinkedHashMap; +import java.util.Map; + +class UniqueNamer { + private final String prefix; + private int counter; + private Map existing; + + /** + * Creates a new UniqueNamer. A UniqueNamer issues unique names, keeping + * track of any previously issued names. + * + * @param prefix + * the prefix to use ('<prefix><counter>'). + */ + public UniqueNamer(String prefix) { + this.prefix = prefix; + this.counter = 0; + this.existing = new LinkedHashMap(); + } + + /** + * Copies this UniqueNamer. + * + * @return a copy of this UniqueNamer. + */ + @Override + public UniqueNamer clone() { + final UniqueNamer copy = new UniqueNamer(this.prefix); + copy.counter = this.counter; + copy.existing = (Map) JsonLdUtils.clone(this.existing); + return copy; + } + + /** + * Gets the new name for the given old name, where if no old name is given a + * new name will be generated. + * + * @param oldName + * the old name to get the new name for. + * + * @return the new name. 
+ */ + public String getName(String oldName) { + if (oldName != null && this.existing.containsKey(oldName)) { + return this.existing.get(oldName); + } + + final String name = this.prefix + this.counter; + this.counter++; + + if (oldName != null) { + this.existing.put(oldName, name); + } + + return name; + } + + public String getName() { + return getName(null); + } + + public Boolean isNamed(String oldName) { + return this.existing.containsKey(oldName); + } + + public Map existing() { + return existing; + } +} \ No newline at end of file diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/core/Urdna2015.java b/jsonldjava/src/main/java/com/github/jsonldjava/core/Urdna2015.java new file mode 100644 index 0000000..95d4fbd --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/core/Urdna2015.java @@ -0,0 +1,678 @@ +package com.github.jsonldjava.core; + + +import java.util.*; + +import static com.github.jsonldjava.core.RDFDatasetUtils.parseNQuads; + +public class Urdna2015 { + + /* + * This library has been modified to meet blockcerts qualifications + */ + + private static final Map QUAD_POSITIONS = new HashMap() {{ + put("subject", "s"); + put("object", "o"); + put("name", "g"); + }}; + + private List>> quads; + private Map>> blankNodeInfo; + private Map> hashToBlankNodes; + private IdentifierIssuer canonicalIssuer; + + private Map dataset; + private JsonLdOptions options; + + public Urdna2015(Map dataset, JsonLdOptions options) { + this.dataset = dataset; + this.options = options; + + } + + public Object normalize() { + this.quads = new ArrayList<>(); + this.blankNodeInfo = new HashMap<>(); + this.hashToBlankNodes = new HashMap<>(); + this.canonicalIssuer = new IdentifierIssuer("_:c14n"); + + /* + * 2) For every quad in input dataset: + * STATUS : step 2 is good! + */ + for (String graphName : this.dataset.keySet()) { + List>> triples = (List>>) this.dataset + .get(graphName); + + if (graphName.equals("@default")) { + graphName = null; + } + + for (Map> quad : triples) { + if (graphName != null) { + if (graphName.startsWith("_:")) { + Map tmp = new HashMap<>(); + tmp.put("type", "blank node"); + quad.put("name", tmp); + } else { + Map tmp = new HashMap<>(); + tmp.put("type", "IRI"); + quad.put("name", tmp); + } + quad.get("name").put("value", graphName); + } + this.quads.add(quad); + + /* 2.1) For each blank node that occurs in the quad, add a + * reference to the quad using the blank node identifier in the + * blank node to quads map, creating a new entry if necessary. + * */ + + for (String key : quad.keySet()) { + + HashMap component = (HashMap) quad.get(key); + if (key.equals("predicate") || !component.get("type").equals("blank node")) { + continue; + } + String id = component.get("value"); + if (this.blankNodeInfo.get(id) == null) { + Map> quadList = new HashMap<>(); + quadList.put("quads", new ArrayList<>()); + quadList.get("quads").add(quad); + this.blankNodeInfo.put(id, quadList); + } else { + this.blankNodeInfo.get(id).get("quads").add(quad); + } + } + } + + + } + + /* 3) Create a list of non-normalized blank node identifiers and + * populate it using the keys from the blank node to quads map. + */ + + List nonNormalized = new ArrayList<>(); + nonNormalized.addAll(blankNodeInfo.keySet()); + //Collections.sort(nonNormalized); + + /* 4) Initialize simple, a boolean flag, to true. 
+ * STATUS : if this does not work we have a serious problem + */ + boolean simple = true; + + /* + * 5) While simple is true, issue canonical identifiers for blank nodes: + */ + + while (simple) { + // 5.1) Set simple to false. + simple = false; + + // 5.2) Clear hash to blank nodes map. + this.hashToBlankNodes.clear(); + + /* + * 5.3) For each blank node identifier identifier in non-normalized + * identifiers: + * STATUS : working on it + */ + for (String id : nonNormalized) { + String hash = hashFirstDegreeQuads(id); + + if (this.hashToBlankNodes.containsKey(hash)) { + this.hashToBlankNodes.get(hash).add(id); + } else { + List idList = new ArrayList<>(); + idList.add(id); + this.hashToBlankNodes.put(hash, idList); + } + } + + /* + * 5.4) For each hash to identifier list mapping in hash to blank + * nodes map, lexicographically-sorted by hash: + */ + for (String hash : NormalizeUtils.sortMapKeys(this.hashToBlankNodes)) { + List idList = this.hashToBlankNodes.get(hash); + if (idList.size() > 1) { + continue; + } + + /* 5.4.2) Use the Issue Identifier algorithm, passing canonical + * issuer and the single blank node identifier in identifier + * list, identifier, to issue a canonical replacement identifier + * for identifier. + */ + + String id = idList.get(0); + + this.canonicalIssuer.getId(id); + + // 5.4.3) Remove identifier from non-normalized identifiers. + nonNormalized.remove(id); + + // 5.4.4) Remove hash from the hash to blank nodes map. + + this.hashToBlankNodes.remove(hash); + + // 5.4.5) Set simple to true. + + simple = true; + + } + } + + /* + * 6) For each hash to identifier list mapping in hash to blank nodes + * map, lexicographically-sorted by hash: + * STATUS: does not loop through it + */ + for (String hash : NormalizeUtils.sortMapKeys(this.hashToBlankNodes)) { + List idList = this.hashToBlankNodes.get(hash); + + /* + * 6.1) Create hash path list where each item will be a result of + * running the Hash N-Degree Quads algorithm. + */ + List> hashPathList = new ArrayList<>(); + + /* + * 6.2) For each blank node identifier identifier in identifier + * list: + */ + + for (String id : idList) { + /* + * 6.2.1) If a canonical identifier has already been issued for + * identifier, continue to the next identifier. + */ + + if (this.canonicalIssuer.hasID(id)) { + continue; + } + + /* + * 6.2.2) Create temporary issuer, an identifier issuer + * initialized with the prefix _:b. + */ + + IdentifierIssuer issuer = new IdentifierIssuer("_:b"); + + /* + * 6.2.3) Use the Issue Identifier algorithm, passing temporary + * issuer and identifier, to issue a new temporary blank node + * identifier for identifier. + */ + + issuer.getId(id); + + /* + * 6.2.4) Run the Hash N-Degree Quads algorithm, passing + * temporary issuer, and append the result to the hash path + * list. + */ + + + hashPathList.add(hashNDegreeQuads(issuer, id)); + } + + /* + * 6.3) For each result in the hash path list, + * lexicographically-sorted by the hash in result: + */ + + NormalizeUtils.sortMapList(hashPathList); + for (Map result : hashPathList) { // need to check out in python + if (result.get("issuer") != null) { + for (String existing : ((IdentifierIssuer) result.get("issuer")).getOrder()) { + + this.canonicalIssuer.getId(existing); + } + } + + } + + } + + /* + * Note: At this point all blank nodes in the set of RDF quads have been + * assigned canonical identifiers, which have been stored in the + * canonical issuer. Here each quad is updated by assigning each of its + * blank nodes its new identifier. 
+ */ + + // 7) For each quad, quad, in input dataset: + List normalized = new ArrayList<>(); + for (Map> quadMap : this.quads) { + /* + * Create a copy, quad copy, of quad and replace any existing + * blank node identifiers using the canonical identifiers previously + * issued by canonical issuer. Note: We optimize away the copy here. + * STATUS : currently working on it + */ + for (String key : quadMap.keySet()) { + if (key.equals("predicate")) { + continue; + } else { + Map component = quadMap.get(key); + if (component.get("type").equals("blank node") && !component.get("value").startsWith(this + .canonicalIssuer.getPrefix())) { + component.put("value", this.canonicalIssuer.getId(component.get("value"))); + } + } + } + + // 7.2) Add quad copy to the normalized dataset. + String q = quadMap.containsKey("name") && quadMap.get("name") != null + ? (quadMap.get("name")) + .get("value") + : null; + RDFDataset.Quad quad = new RDFDataset.Quad(quadMap, q); + normalized.add(RDFDatasetUtils.toNQuad(quad, quadMap.containsKey("name") && quadMap.get("name") != null + ? (quadMap.get("name")) + .get("value") + : null)); + + } + + // 8) Return the normalized dataset. + Collections.sort(normalized); + if (this.options.format != null) { + if ("application/nquads".equals(this.options.format)) { + StringBuilder rval = new StringBuilder(); + for (String n : normalized) { + rval.append(n); + } + return rval.toString(); + } else { // will need to implement error handling + } + } else { + StringBuilder rval = new StringBuilder(); + for (final String n : normalized) { + rval.append(n); + } + try { + return parseNQuads(rval.toString()); + } catch (JsonLdError jsonLdError) { + jsonLdError.printStackTrace(); + } + } + return null; + } + + /* + * STATUS : working on it + */ + private String hashFirstDegreeQuads(String id) { + // return cached hash + Map> info = this.blankNodeInfo.get(id); + if (info.containsKey("hash")) { + return String.valueOf(info.get("hash")); + } + + // 1) Initialize nquads to an empty list. It will be used to store quads + // in N-Quads format. + List nquads = new ArrayList<>(); + + // 2) Get the list of quads quads associated with the reference blank + // node identifier in the blank node to quads map. + + List quads = info.get("quads"); + + // 3) For each quad quad in quads: + for (Object quad : quads) { + // 3.1) Serialize the quad in N-Quads format with the following + // special rule: + + // 3.1.1) If any component in quad is an blank node, then serialize + // it using a special identifier as follows: + + // copy = {} + + Map> copy = new HashMap<>(); + + /* 3.1.2) If the blank node's existing blank node identifier + * matches the reference blank node identifier then use the + * blank node identifier _:a, otherwise, use the blank node + * identifier _:z. + * STATUS: working + */ + + RDFDataset.Quad quadMap = (RDFDataset.Quad) quad; + for (String key : quadMap.keySet()) { + Map component = (Map) quadMap.get(key); + if (key.equals("predicate")) { + copy.put(key, component); + continue; + } + copy.put(key, modifyFirstDegreeComponent(component, id)); + } + String cq = copy.containsKey("name") && copy.get("name") != null + ? (copy.get("name")) + .get("value") + : null; + RDFDataset.Quad copyQuad = new RDFDataset.Quad(copy, cq); + nquads.add(RDFDatasetUtils.toNQuad(copyQuad, copyQuad.containsKey("name") && copyQuad.get("name") != null + ? (String) ((Map) copyQuad.get("name")) + .get("value") + : null)); + } + // 4) Sort nquads in lexicographical order. 
+ + Collections.sort(nquads); + // 5) Return the hash that results from passing the sorted, joined + // nquads through the hash algorithm. + + return NormalizeUtils.sha256HashnQuads(nquads); + } + + private Map hashNDegreeQuads(IdentifierIssuer issuer, String id) { + /* + * 1) Create a hash to related blank nodes map for storing hashes that + * identify related blank nodes. + * Note: 2) and 3) handled within `createHashToRelated` + */ + + Map> hashToRelated = this.createHashToRelated(issuer, id); + + /* + * 4) Create an empty string, data to hash. + * Note: We create a hash object instead. + */ + + String mdString = ""; + + /* + * 5) For each related hash to blank node list mapping in hash to + * related blank nodes map, sorted lexicographically by related hash: + */ + NormalizeUtils.sortMapKeys(hashToRelated); // sort hashToRelated in lexical order + for (String hash : hashToRelated.keySet()) { + List blankNodes = hashToRelated.get(hash); + // 5.1) Append the related hash to the data to hash. + mdString += hash; + + // 5.2) Create a string chosen path. + + String chosenPath = " "; + + // 5.3) Create an unset chosen issuer variable. + + IdentifierIssuer chosenIssuer = null; + + // 5.4) For each permutation of blank node list: + + String path = ""; + List recursionList = null; + IdentifierIssuer issuerCopy = null; + boolean skipToNextPerm = false; + NormalizeUtils.Permutator permmutator = new NormalizeUtils.Permutator(blankNodes); + + while (permmutator.hasNext()) { + List permutation = permmutator.next(); + // 5.4.1) Create a copy of issuer, issuer copy. + + issuerCopy = (IdentifierIssuer) issuer.clone(); + + // 5.4.2) Create a string path. + + path = ""; + + /* + * 5.4.3) Create a recursion list, to store blank node + * identifiers that must be recursively processed by this + * algorithm. + */ + + recursionList = new ArrayList<>(); + + // 5.4.4) For each related in permutation: + + for (String related : permutation) { + /* + * 5.4.4.1) If a canonical identifier has been issued for + * related, append it to path. + */ + + if (this.canonicalIssuer.hasID(related)) { + path += this.canonicalIssuer.getId(related); + } + // 5.4.4.2) Otherwise: + else { + /* + * 5.4.4.2.1) If issuer copy has not issued an + * identifier for related, append related to recursion + * list. + */ + + if (!issuerCopy.hasID(related)) { + recursionList.add(related); + } + + /* + * 5.4.4.2.2) Use the Issue Identifier algorithm, + * passing issuer copy and related and append the result + * to path. + */ + + path += issuerCopy.getId(related); + } + + /* + * 5.4.4.3) If chosen path is not empty and the length of + * path is greater than or equal to the length of chosen + * path and path is lexicographically greater than chosen + * path, then skip to the next permutation. + */ + + if (chosenPath.length() != 0 && path.length() >= chosenPath.length() + && path.compareTo(chosenPath) == 1) { + skipToNextPerm = true; + break; + } + + } + + } + if (skipToNextPerm) { + continue; + } + + // 5.4.5) For each related in recursion list: + + for (String related : recursionList) { + /* + * 5.4.5.1) Set result to the result of recursively + * executing the Hash N-Degree Quads algorithm, passing + * related for identifier and issuer copy for path + * identifier issuer. + */ + + Map result = hashNDegreeQuads(issuerCopy, related); + + /* + * 5.4.5.2) Use the Issue Identifier algorithm, passing + * issuer copy and related and append the result to path. 
+ */ + + path += '<' + (String) result.get("hash") + '>'; + + /* + * 5.4.5.4) Set issuer copy to the identifier issuer in + * result. + */ + + issuerCopy = (IdentifierIssuer) result.get("issuer"); + + /* + * 5.4.5.5) If chosen path is not empty and the length of + * path is greater than or equal to the length of chosen + * path and path is lexicographically greater than chosen + * path, then skip to the next permutation. + */ + + if (chosenPath.length() != 0 && path.length() >= chosenPath.length() + && path.compareTo(chosenPath) == 1) {// need to check out if path > chosenpath + skipToNextPerm = true; + break; + } + } + + if (skipToNextPerm) { + continue; + } + + /* + * 5.4.6) If chosen path is empty or path is lexicographically + * less than chosen path, set chosen path to path and chosen + * issuer to issuer copy. + */ + + if (chosenPath.length() == 0 || path.compareTo(chosenPath) == -1) { + chosenPath = path; + chosenIssuer = issuerCopy; + } + + // 5.5) Append chosen path to data to hash. + mdString += chosenPath; + + // 5.6) Replace issuer, by reference, with chosen issuer. + issuer = chosenIssuer; + + /* + 6) Return issuer and the hash that results from passing data to hash + * through the hash algorithm. + */ + + + } + /* + * 6) Return issuer and the hash that results from passing data to hash + * through the hash algorithm. + */ + + Map hashQuad = new HashMap<>(); + hashQuad.put("hash", NormalizeUtils.sha256Hash(mdString.getBytes())); + hashQuad.put("issuer", issuer); + + return hashQuad; + } + + private Map> createHashToRelated(IdentifierIssuer issuer, String id) { + /* + * 1) Create a hash to related blank nodes map for storing hashes that + * identify related blank nodes. + */ + + List quads = this.blankNodeInfo.get(id).get("quads"); + + Map> hashToRelated = new HashMap<>(); + + /* + * 2) Get a reference, quads, to the list of quads in the blank node to + * quads map for the key identifier. + * Already in parameter + */ + + // 3) For each quad in quads: + + for (Object quad : quads) { + /* + * 3.1) For each component in quad, if component is the subject, + * object, and graph name and it is a blank node that is not + * identified by identifier: + */ + Map> quadMap = (Map>) quad; + + for (String key : quadMap.keySet()) { + Map component = quadMap.get(key); + if (!key.equals("predicate") && component.get("type").equals("blank node") + && !component.get("value").equals(id)) { + + /* + * 3.1.1) Set hash to the result of the Hash Related Blank + * Node algorithm, passing the blank node identifier for + * component as related, quad, path identifier issuer as + * issuer, and position as either s, o, or g based on + * whether component is a subject, object, graph name, + * respectively. + */ + + String related = component.get("value"); + String position = QUAD_POSITIONS.get(key); + + String hash = hashRelateBlankNode(related, quadMap, issuer, position); + + if (hashToRelated.containsKey(hash)) { + hashToRelated.get(hash).add(related); + } else { + List relatedList = new ArrayList<>(); + relatedList.add(related); + hashToRelated.put(hash, relatedList); + } + + } + + } + + } + + return hashToRelated; + } + + private String hashRelateBlankNode(String related, Map> quad, + IdentifierIssuer issuer, + String position) { + /* + * 1) Set the identifier to use for related, preferring first the + * canonical identifier for related if issued, second the identifier + * issued by issuer if issued, and last, if necessary, the result of + * the Hash First Degree Quads algorithm, passing related. 
+ */ + + String id; + if (this.canonicalIssuer.hasID(related)) { + id = this.canonicalIssuer.getId(related); + } else if (issuer.hasID(related)) { + id = issuer.getId(related); + } else { + id = hashFirstDegreeQuads(related); + } + + /* + * 2) Initialize a string input to the value of position. + * Note: We use a hash object instead. + */ + + if (!position.equals("g")) { + return NormalizeUtils.sha256Hash((position + getRelatedPredicate(quad) + id).getBytes()); + } else { + return NormalizeUtils.sha256Hash((position + id).getBytes()); + } + + } + + private static Map modifyFirstDegreeComponent(Map component, String id) { + + if (!component.get("type").equals("blank node")) { + return component; + } + Map componentClone = (Map) JsonLdUtils.clone(component); + if (componentClone.get("value").equals(id)) { + componentClone.put("value", "_:a"); + } else { + componentClone.put("value", "_:z"); + } + return componentClone; + } + + private String getRelatedPredicate(Map> quad) { + return "<" + quad.get("predicate").get("value") + ">"; + + } + + +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/impl/NQuadRDFParser.java b/jsonldjava/src/main/java/com/github/jsonldjava/impl/NQuadRDFParser.java new file mode 100644 index 0000000..5209cac --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/impl/NQuadRDFParser.java @@ -0,0 +1,19 @@ +package com.github.jsonldjava.impl; + +import com.github.jsonldjava.core.JsonLdError; +import com.github.jsonldjava.core.RDFDataset; +import com.github.jsonldjava.core.RDFDatasetUtils; +import com.github.jsonldjava.core.RDFParser; + +public class NQuadRDFParser implements RDFParser { + @Override + public RDFDataset parse(Object input) throws JsonLdError { + if (input instanceof String) { + return RDFDatasetUtils.parseNQuads((String) input); + } else { + throw new JsonLdError(JsonLdError.Error.INVALID_INPUT, + "NQuad Parser expected string input."); + } + } + +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/impl/NQuadTripleCallback.java b/jsonldjava/src/main/java/com/github/jsonldjava/impl/NQuadTripleCallback.java new file mode 100644 index 0000000..2fdef55 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/impl/NQuadTripleCallback.java @@ -0,0 +1,12 @@ +package com.github.jsonldjava.impl; + +import com.github.jsonldjava.core.JsonLdTripleCallback; +import com.github.jsonldjava.core.RDFDataset; +import com.github.jsonldjava.core.RDFDatasetUtils; + +public class NQuadTripleCallback implements JsonLdTripleCallback { + @Override + public Object call(RDFDataset dataset) { + return RDFDatasetUtils.toNQuads(dataset); + } +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/utils/JsonLdUrl.java b/jsonldjava/src/main/java/com/github/jsonldjava/utils/JsonLdUrl.java new file mode 100755 index 0000000..a215383 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/utils/JsonLdUrl.java @@ -0,0 +1,315 @@ +package com.github.jsonldjava.utils; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class JsonLdUrl { + + public String href = ""; + public String protocol = ""; + public String host = ""; + public String auth = ""; + public String user = ""; + public String password = ""; + public String hostname = ""; + public String port = ""; + public String relative = ""; + public String path = ""; + public String directory = ""; + public String file = 
""; + public String query = ""; + public String hash = ""; + + // things not populated by the regex (NOTE: i don't think it matters if + // these are null or "" to start with) + public String pathname = null; + public String normalizedPath = null; + public String authority = null; + + private static Pattern parser = Pattern.compile( + "^(?:([^:\\/?#]+):)?(?:\\/\\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\\/?#]*)(?::(\\d*))?))?((((?:[^?#\\/]*\\/)*)([^?#]*))(?:\\?([^#]*))?(?:#(.*))?)"); + + public static JsonLdUrl parse(String url) { + final JsonLdUrl rval = new JsonLdUrl(); + rval.href = url; + + final Matcher matcher = parser.matcher(url); + if (matcher.matches()) { + if (matcher.group(1) != null) { + rval.protocol = matcher.group(1); + } + if (matcher.group(2) != null) { + rval.host = matcher.group(2); + } + if (matcher.group(3) != null) { + rval.auth = matcher.group(3); + } + if (matcher.group(4) != null) { + rval.user = matcher.group(4); + } + if (matcher.group(5) != null) { + rval.password = matcher.group(5); + } + if (matcher.group(6) != null) { + rval.hostname = matcher.group(6); + } + if (matcher.group(7) != null) { + rval.port = matcher.group(7); + } + if (matcher.group(8) != null) { + rval.relative = matcher.group(8); + } + if (matcher.group(9) != null) { + rval.path = matcher.group(9); + } + if (matcher.group(10) != null) { + rval.directory = matcher.group(10); + } + if (matcher.group(11) != null) { + rval.file = matcher.group(11); + } + if (matcher.group(12) != null) { + rval.query = matcher.group(12); + } + if (matcher.group(13) != null) { + rval.hash = matcher.group(13); + } + + // normalize to node.js API + if (!"".equals(rval.host) && "".equals(rval.path)) { + rval.path = "/"; + } + rval.pathname = rval.path; + parseAuthority(rval); + rval.normalizedPath = removeDotSegments(rval.pathname, !"".equals(rval.authority)); + if (!"".equals(rval.query)) { + rval.path += "?" + rval.query; + } + if (!"".equals(rval.protocol)) { + rval.protocol += ":"; + } + if (!"".equals(rval.hash)) { + rval.hash = "#" + rval.hash; + } + return rval; + } + + return rval; + } + + /** + * Removes dot segments from a JsonLdUrl path. + * + * @param path + * the path to remove dot segments from. + * @param hasAuthority + * true if the JsonLdUrl has an authority, false if not. 
+ * @return The URL without the dot segments + */ + public static String removeDotSegments(String path, boolean hasAuthority) { + String rval = ""; + + if (path.indexOf("/") == 0) { + rval = "/"; + } + + // RFC 3986 5.2.4 (reworked) + final List input = new ArrayList(Arrays.asList(path.split("/"))); + if (path.endsWith("/")) { + // javascript .split includes a blank entry if the string ends with + // the delimiter, java .split does not so we need to add it manually + input.add(""); + } + final List output = new ArrayList(); + for (int i = 0; i < input.size(); i++) { + if (".".equals(input.get(i)) || ("".equals(input.get(i)) && input.size() - i > 1)) { + // input.remove(0); + continue; + } + if ("..".equals(input.get(i))) { + // input.remove(0); + if (hasAuthority + || (output.size() > 0 && !"..".equals(output.get(output.size() - 1)))) { + // [].pop() doesn't fail, to replicate this we need to check + // that there is something to remove + if (output.size() > 0) { + output.remove(output.size() - 1); + } + } else { + output.add(".."); + } + continue; + } + output.add(input.get(i)); + // input.remove(0); + } + + if (output.size() > 0) { + rval += output.get(0); + for (int i = 1; i < output.size(); i++) { + rval += "/" + output.get(i); + } + } + return rval; + } + + public static String removeBase(Object baseobj, String iri) { + if (baseobj == null) { + return iri; + } + + JsonLdUrl base; + if (baseobj instanceof String) { + base = JsonLdUrl.parse((String) baseobj); + } else { + base = (JsonLdUrl) baseobj; + } + + // establish base root + String root = ""; + if (!"".equals(base.href)) { + root += (base.protocol) + "//" + base.authority; + } + // support network-path reference with empty base + else if (iri.indexOf("//") != 0) { + root += "//"; + } + + // IRI not relative to base + if (iri.indexOf(root) != 0) { + return iri; + } + + // remove root from IRI and parse remainder + final JsonLdUrl rel = JsonLdUrl.parse(iri.substring(root.length())); + + // remove path segments that match + final List baseSegments = new ArrayList( + Arrays.asList(base.normalizedPath.split("/"))); + if (base.normalizedPath.endsWith("/")) { + baseSegments.add(""); + } + final List iriSegments = new ArrayList( + Arrays.asList(rel.normalizedPath.split("/"))); + if (rel.normalizedPath.endsWith("/")) { + iriSegments.add(""); + } + + while (baseSegments.size() > 0 && iriSegments.size() > 0) { + if (!baseSegments.get(0).equals(iriSegments.get(0))) { + break; + } + if (baseSegments.size() > 0) { + baseSegments.remove(0); + } + if (iriSegments.size() > 0) { + iriSegments.remove(0); + } + } + + // use '../' for each non-matching base segment + String rval = ""; + if (baseSegments.size() > 0) { + // don't count the last segment if it isn't a path (doesn't end in + // '/') + // don't count empty first segment, it means base began with '/' + if (!base.normalizedPath.endsWith("/") || "".equals(baseSegments.get(0))) { + baseSegments.remove(baseSegments.size() - 1); + } + for (int i = 0; i < baseSegments.size(); ++i) { + rval += "../"; + } + } + + // prepend remaining segments + if (iriSegments.size() > 0) { + rval += iriSegments.get(0); + } + for (int i = 1; i < iriSegments.size(); i++) { + rval += "/" + iriSegments.get(i); + } + + // add query and hash + if (!"".equals(rel.query)) { + rval += "?" 
+ rel.query; + } + if (!"".equals(rel.hash)) { + rval += rel.hash; + } + + if ("".equals(rval)) { + rval = "./"; + } + + return rval; + } + + public static String resolve(String baseUri, String pathToResolve) { + // TODO: some input will need to be normalized to perform the expected + // result with java + // TODO: we can do this without using java URI! + if (baseUri == null) { + return pathToResolve; + } + if (pathToResolve == null || "".equals(pathToResolve.trim())) { + return baseUri; + } + try { + URI uri = new URI(baseUri); + // query string parsing + if (pathToResolve.startsWith("?")) { + // drop fragment from uri if it has one + if (uri.getFragment() != null) { + uri = new URI(uri.getScheme(), uri.getAuthority(), uri.getPath(), null, null); + } + // add query to the end manually (as URI.resolve does it wrong) + return uri.toString() + pathToResolve; + } + + uri = uri.resolve(pathToResolve); + // java doesn't discard unnecessary dot segments + String path = uri.getPath(); + if (path != null) { + path = JsonLdUrl.removeDotSegments(uri.getPath(), true); + } + return new URI(uri.getScheme(), uri.getAuthority(), path, uri.getQuery(), + uri.getFragment()).toString(); + } catch (final URISyntaxException e) { + return null; + } + } + + /** + * Parses the authority for the pre-parsed given JsonLdUrl. + * + * @param parsed + * the pre-parsed JsonLdUrl. + */ + private static void parseAuthority(JsonLdUrl parsed) { + // parse authority for unparsed relative network-path reference + if (parsed.href.indexOf(":") == -1 && parsed.href.indexOf("//") == 0 + && "".equals(parsed.host)) { + // must parse authority from pathname + parsed.pathname = parsed.pathname.substring(2); + final int idx = parsed.pathname.indexOf("/"); + if (idx == -1) { + parsed.authority = parsed.pathname; + parsed.pathname = ""; + } else { + parsed.authority = parsed.pathname.substring(0, idx); + parsed.pathname = parsed.pathname.substring(idx); + } + } else { + // construct authority + parsed.authority = parsed.host; + if (!"".equals(parsed.auth)) { + parsed.authority = parsed.auth + "@" + parsed.authority; + } + } + } +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/utils/JsonNormalizer.java b/jsonldjava/src/main/java/com/github/jsonldjava/utils/JsonNormalizer.java new file mode 100644 index 0000000..017a27d --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/utils/JsonNormalizer.java @@ -0,0 +1,50 @@ +package com.github.jsonldjava.utils; + +import android.os.AsyncTask; +import com.github.jsonldjava.core.JsonLdError; +import com.github.jsonldjava.core.JsonLdOptions; +import com.github.jsonldjava.core.JsonLdProcessor; + +public class JsonNormalizer extends AsyncTask { + + private JsonLdOptions options; + private OnNormalizedCompleted listener; + private Object normalizedObject; + private Object initialObject; + + public JsonNormalizer(Object object, OnNormalizedCompleted listener) { + this(object, null, listener); + } + + public JsonNormalizer(Object object, JsonLdOptions options, OnNormalizedCompleted listener) { + if (options == null) { + this.options = new JsonLdOptions(); + this.options.format = "application/nquads"; + } + this.listener = listener; + this.initialObject = object; + } + + public interface OnNormalizedCompleted { + void OnNormalizedComplete(Object object); + } + + @Override + protected Void doInBackground(Void... 
voids) { + try { + + options.setAlgorithm(JsonLdOptions.URDNA2015); + this.normalizedObject = JsonLdProcessor.normalize(this.initialObject, this.options); + } catch (JsonLdError jsonLdError) { + jsonLdError.printStackTrace(); + } + return null; + } + + @Override + protected void onPostExecute(Void result) { + this.listener.OnNormalizedComplete(this.normalizedObject); + + } + +} diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/utils/JsonUtils.java b/jsonldjava/src/main/java/com/github/jsonldjava/utils/JsonUtils.java new file mode 100644 index 0000000..067b122 --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/utils/JsonUtils.java @@ -0,0 +1,251 @@ +package com.github.jsonldjava.utils; + +import com.fasterxml.jackson.core.*; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.jsonldjava.core.JsonLdApi; +import com.github.jsonldjava.core.JsonLdProcessor; + +import javax.net.ssl.HttpsURLConnection; +import java.io.*; +import java.net.HttpURLConnection; +import java.util.List; +import java.util.Map; + + +public class JsonUtils { + /** + * An HTTP Accept header that prefers JSONLD. + */ + public static final String ACCEPT_HEADER = "application/ld+json, application/json;q=0.9, application/javascript;q=0.5, text/javascript;q=0.5, text/plain;q=0.2, */*;q=0.1"; + private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); + private static final JsonFactory JSON_FACTORY = new JsonFactory(JSON_MAPPER); + //private static volatile CloseableHttpClient DEFAULT_HTTP_CLIENT; + + static { + // Disable default Jackson behaviour to close + // InputStreams/Readers/OutputStreams/Writers + JSON_FACTORY.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET); + // Disable string retention features that may work for most JSON where + // the field names are in limited supply, but does not work for JSON-LD + // where a wide range of URIs are used for subjects and predicates + JSON_FACTORY.disable(JsonFactory.Feature.INTERN_FIELD_NAMES); + JSON_FACTORY.disable(JsonFactory.Feature.CANONICALIZE_FIELD_NAMES); + } + + /** + * Parses a JSON-LD document from the given {@link InputStream} to an object + * that can be used as input for the {@link JsonLdApi} and + * {@link JsonLdProcessor} methods.
+ * Uses UTF-8 as the character encoding when decoding the InputStream. + * + * @param input The JSON-LD document in an InputStream. + * @return A JSON Object. + * @throws JsonParseException If there was a JSON related error during parsing. + * @throws IOException If there was an IO error during parsing. + */ + public static Object fromInputStream(InputStream input) throws IOException { + // no readers from inputstreams w.o. encoding!! + return fromInputStream(input, "UTF-8"); + } + + /** + * Parses a JSON-LD document from the given {@link InputStream} to an object + * that can be used as input for the {@link JsonLdApi} and + * {@link JsonLdProcessor} methods. + * + * @param input The JSON-LD document in an InputStream. + * @param enc The character encoding to use when interpreting the characters + * in the InputStream. + * @return A JSON Object. + * @throws JsonParseException If there was a JSON related error during parsing. + * @throws IOException If there was an IO error during parsing. + */ + public static Object fromInputStream(InputStream input, String enc) throws IOException { + InputStreamReader in = new InputStreamReader(input, enc); + BufferedReader reader = new BufferedReader(in); + return fromReader(reader); + + /*try (InputStreamReader in = new InputStreamReader(input, enc); + BufferedReader reader = new BufferedReader(in);) { + return fromReader(reader); + }*/ + } + + /** + * Parses a JSON-LD document from the given {@link Reader} to an object that + * can be used as input for the {@link JsonLdApi} and + * {@link JsonLdProcessor} methods. + * + * @param reader The JSON-LD document in a Reader. + * @return A JSON Object. + * @throws JsonParseException If there was a JSON related error during parsing. + * @throws IOException If there was an IO error during parsing. + */ + public static Object fromReader(Reader reader) throws IOException { + final JsonParser jp = JSON_FACTORY.createParser(reader); + Object rval; + final JsonToken initialToken = jp.nextToken(); + + if (initialToken == JsonToken.START_ARRAY) { + rval = jp.readValueAs(List.class); + } else if (initialToken == JsonToken.START_OBJECT) { + rval = jp.readValueAs(Map.class); + } else if (initialToken == JsonToken.VALUE_STRING) { + rval = jp.readValueAs(String.class); + } else if (initialToken == JsonToken.VALUE_FALSE || initialToken == JsonToken.VALUE_TRUE) { + rval = jp.readValueAs(Boolean.class); + } else if (initialToken == JsonToken.VALUE_NUMBER_FLOAT + || initialToken == JsonToken.VALUE_NUMBER_INT) { + rval = jp.readValueAs(Number.class); + } else if (initialToken == JsonToken.VALUE_NULL) { + rval = null; + } else { + throw new JsonParseException(jp, + "document doesn't start with a valid json element : " + initialToken, + jp.getCurrentLocation()); + } + + JsonToken t; + try { + t = jp.nextToken(); + } catch (final JsonParseException ex) { + throw new JsonParseException(jp, + "Document contains more content after json-ld element - (possible mismatched {}?)", + jp.getCurrentLocation()); + } + if (t != null) { + throw new JsonParseException(jp, + "Document contains possible json content after the json-ld element - (possible mismatched {}?)", + jp.getCurrentLocation()); + } + return rval; + } + + /** + * Parses a JSON-LD document from a string to an object that can be used as + * input for the {@link JsonLdApi} and {@link JsonLdProcessor} methods. + * + * @param jsonString The JSON-LD document as a string. + * @return A JSON Object. + * @throws JsonParseException If there was a JSON related error during parsing. 
+ * @throws IOException If there was an IO error during parsing. + */ + public static Object fromString(String jsonString) throws JsonParseException, IOException { + return fromReader(new StringReader(jsonString)); + } + + /** + * Writes the given JSON-LD Object out to a String, using indentation and + * new lines to improve readability. + * + * @param jsonObject The JSON-LD Object to serialize. + * @return A JSON document serialised to a String. + * @throws JsonGenerationException If there is a JSON error during serialization. + * @throws IOException If there is an IO error during serialization. + */ + public static String toPrettyString(Object jsonObject) + throws JsonGenerationException, IOException { + final StringWriter sw = new StringWriter(); + writePrettyPrint(sw, jsonObject); + return sw.toString(); + } + + /** + * Writes the given JSON-LD Object out to a String. + * + * @param jsonObject The JSON-LD Object to serialize. + * @return A JSON document serialised to a String. + * @throws JsonGenerationException If there is a JSON error during serialization. + * @throws IOException If there is an IO error during serialization. + */ + public static String toString(Object jsonObject) throws JsonGenerationException, IOException { + final StringWriter sw = new StringWriter(); + write(sw, jsonObject); + return sw.toString(); + } + + /** + * Writes the given JSON-LD Object out to the given Writer. + * + * @param writer The writer that is to receive the serialized JSON-LD object. + * @param jsonObject The JSON-LD Object to serialize. + * @throws JsonGenerationException If there is a JSON error during serialization. + * @throws IOException If there is an IO error during serialization. + */ + public static void write(Writer writer, Object jsonObject) + throws JsonGenerationException, IOException { + final JsonGenerator jw = JSON_FACTORY.createGenerator(writer); + jw.writeObject(jsonObject); + } + + /** + * Writes the given JSON-LD Object out to the given Writer, using + * indentation and new lines to improve readability. + * + * @param writer The writer that is to receive the serialized JSON-LD object. + * @param jsonObject The JSON-LD Object to serialize. + * @throws JsonGenerationException If there is a JSON error during serialization. + * @throws IOException If there is an IO error during serialization. + */ + public static void writePrettyPrint(Writer writer, Object jsonObject) + throws JsonGenerationException, IOException { + final JsonGenerator jw = JSON_FACTORY.createGenerator(writer); + jw.useDefaultPrettyPrinter(); + jw.writeObject(jsonObject); + } + + /** + * Parses a JSON-LD document, from the contents of the JSON resource + * resolved from the JsonLdUrl, to an object that can be used as input for + * the {@link JsonLdApi} and {@link JsonLdProcessor} methods. + * + * @param url The JsonLdUrl to resolve + * @return A JSON Object. + * @throws JsonParseException If there was a JSON related error during parsing. + * @throws IOException If there was an IO error during parsing. + */ + public static Object fromURL(java.net.URL url) + throws JsonParseException, IOException { + final String protocol = url.getProtocol(); + // We can only use the Apache HTTPClient for HTTP/HTTPS, so use the + // native java client for the others + InputStream in = null; + try { + if (!protocol.equalsIgnoreCase("http") && !protocol.equalsIgnoreCase("https")) { + // Can't use the HTTP client for those! + // Fallback to Java's built-in JsonLdUrl handler. 
No need for + // Accept headers as it's likely to be file: or jar: + in = url.openStream(); + } else if (protocol.equalsIgnoreCase("http")) { + HttpURLConnection httpURLConnection = (HttpURLConnection) url.openConnection(); + httpURLConnection.setRequestProperty("Accept", ACCEPT_HEADER); + in = httpURLConnection.getInputStream(); + + } else if (protocol.equalsIgnoreCase("https")) { + HttpsURLConnection httpsURLConnection = (HttpsURLConnection) url.openConnection(); + httpsURLConnection.setRequestProperty("Accept", ACCEPT_HEADER); + in = httpsURLConnection.getInputStream(); + + } + return fromInputStream(in); + } finally { + if (in != null) { + in.close(); + } + } + } + +} + + + + + + + + + + + + + diff --git a/jsonldjava/src/main/java/com/github/jsonldjava/utils/Obj.java b/jsonldjava/src/main/java/com/github/jsonldjava/utils/Obj.java new file mode 100644 index 0000000..4e7e36b --- /dev/null +++ b/jsonldjava/src/main/java/com/github/jsonldjava/utils/Obj.java @@ -0,0 +1,108 @@ +package com.github.jsonldjava.utils; + +import java.util.LinkedHashMap; +import java.util.Map; + +public class Obj { + + /** + * Helper function for creating maps and tuning them as necessary. + * + * @return A new {@link Map} instance. + */ + public static Map newMap() { + return new LinkedHashMap(4, 0.75f); + } + + /** + * Helper function for creating maps and tuning them as necessary. + * + * @param key + * A key to add to the map on creation. + * @param value + * A value to attach to the key in the new map. + * @return A new {@link Map} instance. + */ + public static Map newMap(String key, Object value) { + final Map result = newMap(); + result.put(key, value); + return result; + } + + /** + * Used to make getting values from maps embedded in maps embedded in maps + * easier TODO: roll out the loops for efficiency + * + * @param map + * The map to get a key from + * @param keys + * The list of keys to attempt to get from the map. The first key + * found with a non-null value is returned, or if none are found, + * the original map is returned. + * @return The key from the map, or the original map if none of the keys are + * found. + */ + public static Object get(Map map, String... keys) { + Map result = map; + for (final String key : keys) { + result = (Map) map.get(key); + // make sure we don't crash if we get a null somewhere down the line + if (result == null) { + return result; + } + } + return result; + } + + public static Object put(Object map, String key1, Object value) { + ((Map) map).put(key1, value); + return map; + } + + public static Object put(Object map, String key1, String key2, Object value) { + ((Map) ((Map) map).get(key1)).put(key2, value); + return map; + } + + public static Object put(Object map, String key1, String key2, String key3, Object value) { + ((Map) ((Map) ((Map) map).get(key1)) + .get(key2)).put(key3, value); + return map; + } + + public static Object put(Object map, String key1, String key2, String key3, String key4, + Object value) { + ((Map) ((Map) ((Map) ((Map) map) + .get(key1)).get(key2)).get(key3)).put(key4, value); + return map; + } + + public static boolean contains(Object map, String... keys) { + for (final String key : keys) { + map = ((Map) map).get(key); + if (map == null) { + return false; + } + } + return true; + } + + public static Object remove(Object map, String k1, String k2) { + return ((Map) ((Map) map).get(k1)).remove(k2); + } + + /** + * A null-safe equals check using v1.equals(v2) if they are both not null. 
+ * + * @param v1 + * The source object for the equals check. + * @param v2 + * The object to be checked for equality using the first object's + * equals method. + * @return True if the objects were both null. True if both objects were not + * null and v1.equals(v2). False otherwise. + */ + public static boolean equals(Object v1, Object v2) { + return v1 == null ? v2 == null : v1.equals(v2); + } +} diff --git a/jsonldjava/src/main/res/values/strings.xml b/jsonldjava/src/main/res/values/strings.xml new file mode 100644 index 0000000..2ec819d --- /dev/null +++ b/jsonldjava/src/main/res/values/strings.xml @@ -0,0 +1,3 @@ + + jsonldjava + diff --git a/app/src/test/java/com/opdup/btcwallet/ExampleUnitTest.java b/jsonldjava/src/test/java/com/opdup/jsonldjava/ExampleUnitTest.java similarity index 92% rename from app/src/test/java/com/opdup/btcwallet/ExampleUnitTest.java rename to jsonldjava/src/test/java/com/opdup/jsonldjava/ExampleUnitTest.java index f6d3a87..aa0617c 100644 --- a/app/src/test/java/com/opdup/btcwallet/ExampleUnitTest.java +++ b/jsonldjava/src/test/java/com/opdup/jsonldjava/ExampleUnitTest.java @@ -1,4 +1,4 @@ -package com.opdup.btcwallet; +package com.opdup.jsonldjava; import org.junit.Test; diff --git a/settings.gradle b/settings.gradle index e7b4def..67fc6b6 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1 +1,2 @@ -include ':app' +include ':btcrserviceclient' +include ':jsonldjava'
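
For reference, a minimal sketch of how the normalization entry points introduced by this patch might be exercised outside the Android AsyncTask. It is not part of the diff: the NormalizeExample class and the inline JSON-LD string are illustrative assumptions, while JsonLdOptions.format, JsonLdOptions.URDNA2015 and JsonLdProcessor.normalize are invoked the same way JsonNormalizer.doInBackground invokes them above.

// Usage sketch (not part of the patch). Assumes the jsonldjava module above is on the
// classpath and that JsonLdProcessor.normalize / JsonLdOptions.URDNA2015 behave as
// JsonNormalizer uses them in this diff.
import com.github.jsonldjava.core.JsonLdOptions;
import com.github.jsonldjava.core.JsonLdProcessor;
import com.github.jsonldjava.utils.JsonUtils;

public class NormalizeExample {
    public static void main(String[] args) throws Exception {
        // Placeholder JSON-LD input; any document parsed by JsonUtils works here.
        String jsonld = "{\"@context\":{\"name\":\"http://schema.org/name\"},\"name\":\"Example\"}";
        Object input = JsonUtils.fromString(jsonld);

        JsonLdOptions options = new JsonLdOptions();
        options.format = "application/nquads";          // request canonical N-Quads text
        options.setAlgorithm(JsonLdOptions.URDNA2015);  // select the URDNA2015 algorithm

        // Synchronous call; on Android the JsonNormalizer AsyncTask wraps this same call
        // and hands the result to OnNormalizedCompleted.OnNormalizedComplete(...).
        Object normalized = JsonLdProcessor.normalize(input, options);
        System.out.println(normalized);
    }
}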