Strabon
changeset 504:b408cde6beca
Added scripts according to George's suggestions; they now conform to the URI representation of hotspots.
Also fixed a minor bug regarding date values (the month was printed instead of the minutes in the corresponding slot) in both v2.2 and v2.3.
author | Manos Karpathiotakis <mk@di.uoa.gr> |
---|---|
date | Wed Jul 25 01:59:00 2012 +0300 (2012-07-25) |
parents | ae32e022c593 |
children | bd55cc99cea2 |
files | scripts/v2.2/runNoaRefinementChain.sh scripts/v2.3/count.sparql scripts/v2.3/deleteSeaHotspots.sparql scripts/v2.3/discover.sparql scripts/v2.3/insertMunicipalities.sparql scripts/v2.3/landUseInvalidForFires.sparql scripts/v2.3/refinePartialSeaHotspots.sparql scripts/v2.3/refineTimePersistence.sparql scripts/v2.3/runNoaRefinementChain-papos.sh scripts/v2.3/runNoaRefinementChain.sh |
line diff
--- a/scripts/v2.2/runNoaRefinementChain.sh	Wed Jul 25 01:17:59 2012 +0300
+++ b/scripts/v2.2/runNoaRefinementChain.sh	Wed Jul 25 01:59:00 2012 +0300
@@ -222,7 +222,7 @@
 
 # refineTimePersistence
 echo -n "Going to refineTimePersistence ${year}-${month}-${day}T${time2}:00 ";echo;echo;echo;
-min_acquisition_time=`date --date="${year}-${month}-${day} ${time2}:00 EEST -30 minutes" +%Y-%m-%dT%H:%m:00`
+min_acquisition_time=`date --date="${year}-${month}-${day} ${time2}:00 EEST -30 minutes" +%Y-%m-%dT%H:%M:00`
 query=`echo "${refineTimePersistence}" | sed "s/TIMESTAMP/${year}-${month}-${day}T${time2}:00/g" | \
 	sed "s/PROCESSING_CHAIN/DynamicThresholds/g" | \
 	sed "s/SENSOR/${SENSOR}/g" | \
@@ -243,8 +243,8 @@
 
 # discover
 echo -n "Going to discover ${year}-${month}-${day}T${time2}:00 ";echo;echo;echo;
-min_acquisition_time=`date --date="${year}-${month}-${day} 00:00 EEST" +%Y-%m-%dT%H:%m:00`
-max_acquisition_time=`date --date="${year}-${month}-${day} 23:59 EEST" +%Y-%m-%dT%H:%m:00`
+min_acquisition_time=`date --date="${year}-${month}-${day} 00:00 EEST" +%Y-%m-%dT%H:%M:00`
+max_acquisition_time=`date --date="${year}-${month}-${day} 23:59 EEST" +%Y-%m-%dT%H:%M:00`
 query=`echo "${discover}" | \
 	sed "s/PROCESSING_CHAIN/DynamicThresholds/g" | \
 	sed "s/SENSOR/${SENSOR}/g" | \

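Editor's note: a minimal sketch of the bug this hunk fixes, assuming GNU date and a hypothetical acquisition time. In a `date` format string `%m` is the month and `%M` is the minutes, so the old pattern stamped the month into the minutes slot of the generated timestamp.

```bash
# Sketch of the %m vs %M fix above (GNU date; example values are hypothetical,
# outputs shown assume an EEST local timezone).
year=2012; month=07; day=25; time2="01:45"
# Old format string: %m repeats the month where the minutes belong.
date --date="${year}-${month}-${day} ${time2}:00 EEST -30 minutes" +%Y-%m-%dT%H:%m:00   # 2012-07-25T01:07:00 (wrong)
# Fixed format string: %M prints the actual minutes.
date --date="${year}-${month}-${day} ${time2}:00 EEST -30 minutes" +%Y-%m-%dT%H:%M:00   # 2012-07-25T01:15:00 (correct)
```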
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/v2.3/count.sparql	Wed Jul 25 01:59:00 2012 +0300
@@ -0,0 +1,2 @@
+SELECT (count(*) AS ?count)
+WHERE {?s ?p ?o}

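count.sparql is a convenience query for checking how many triples the endpoint currently holds. A usage sketch, assuming the same layout the chain scripts below rely on (run from scripts/v2.3, with the `endpoint` helper script one directory up):

```bash
# Sketch: issue count.sparql the same way the chain scripts issue their queries.
ENDPOINT="http://localhost:8080/endpoint"
query=`cat count.sparql`
../endpoint query ${ENDPOINT} "${query}"   # returns the total number of triples as ?count
```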
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/v2.3/deleteSeaHotspots.sparql	Wed Jul 25 01:59:00 2012 +0300
@@ -0,0 +1,17 @@
+PREFIX noa: <http://teleios.di.uoa.gr/ontologies/noaOntology.owl#>
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+PREFIX strdf: <http://strdf.di.uoa.gr/ontology#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+PREFIX gag: <http://teleios.di.uoa.gr/ontologies/gagKallikratis.rdf#>
+
+
+INSERT {?h noa:isDiscarded "1"^^xsd:int}
+WHERE {
+?h noa:hasAcquisitionTime "TIMESTAMP"^^xsd:dateTime ;
+   noa:producedFromProcessingChain "PROCESSING_CHAIN"^^xsd:string ;
+   noa:isDerivedFromSensor "SENSOR"^^xsd:string .
+  OPTIONAL {
+    ?h gag:hasMunicipality ?muni .
+  }
+  FILTER(!bound(?muni)) .
+}

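TIMESTAMP, PROCESSING_CHAIN and SENSOR are placeholders; the chain scripts below instantiate them with sed before sending the update. A sketch of that substitution with hypothetical values:

```bash
# Sketch of the placeholder substitution performed by the chain scripts (example values).
ENDPOINT="http://localhost:8080/endpoint"
deleteSeaHotspots=`cat deleteSeaHotspots.sparql`
query=`echo "${deleteSeaHotspots}" | sed "s/TIMESTAMP/2012-07-25T01:45:00/g" | \
	sed "s/PROCESSING_CHAIN/DynamicThresholds/g" | \
	sed "s/SENSOR/MSG2/g"`
../endpoint update ${ENDPOINT} "${query}"
```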
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/v2.3/discover.sparql	Wed Jul 25 01:59:00 2012 +0300
@@ -0,0 +1,19 @@
+PREFIX noa: <http://teleios.di.uoa.gr/ontologies/noaOntology.owl#>
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX strdf: <http://strdf.di.uoa.gr/ontology#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+PREFIX gag: <http://teleios.di.uoa.gr/ontologies/gagKallikratis.rdf#>
+
+SELECT ?h (strdf:transform(?hGeo, <http://www.opengis.net/def/crs/EPSG/0/4326>) AS ?geo) ?conf ?muni
+WHERE {
+?h noa:hasGeometry ?hGeo ;
+   noa:hasAcquisitionTime ?hAcquisitionTime ;
+   noa:producedFromProcessingChain "PROCESSING_CHAIN"^^xsd:string ;
+   noa:isDerivedFromSensor "SENSOR"^^xsd:string ;
+   noa:hasConfidence ?conf ;
+   gag:hasMunicipality ?muni .
+  FILTER("MIN_ACQUISITION_TIME"^^xsd:dateTime <= ?hAcquisitionTime && ?hAcquisitionTime < "MAX_ACQUISITION_TIME"^^xsd:dateTime ) .
+  OPTIONAL {?h noa:isDiscarded ?disc }.
+  FILTER (!bound(?disc)) .
+}

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/v2.3/insertMunicipalities.sparql	Wed Jul 25 01:59:00 2012 +0300
@@ -0,0 +1,23 @@
+PREFIX noa: <http://teleios.di.uoa.gr/ontologies/noaOntology.owl#>
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX strdf: <http://strdf.di.uoa.gr/ontology#>
+PREFIX gag: <http://teleios.di.uoa.gr/ontologies/gagKallikratis.rdf#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+
+INSERT { ?h gag:hasMunicipality ?muni }
+WHERE {
+SELECT ?h (SAMPLE(?mLabel) AS ?muni)
+  WHERE {
+    ?h rdf:type noa:Hotspot ;
+       noa:hasGeometry ?hGeo ;
+       noa:isDerivedFromSensor "SENSOR"^^xsd:string ;
+       noa:producedFromProcessingChain "PROCESSING_CHAIN"^^xsd:string ;
+       noa:hasAcquisitionTime "TIMESTAMP"^^xsd:dateTime.
+    ?m rdf:type gag:Dhmos ;
+       rdfs:label ?mLabel ;
+       strdf:hasGeometry ?mGeo .
+    FILTER(strdf:mbbIntersects(?hGeo, ?mGeo)) .
+  }
+  GROUP BY ?h
+}

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/v2.3/landUseInvalidForFires.sparql	Wed Jul 25 01:59:00 2012 +0300
@@ -0,0 +1,23 @@
+PREFIX noa: <http://teleios.di.uoa.gr/ontologies/noaOntology.owl#>
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+PREFIX strdf: <http://strdf.di.uoa.gr/ontology#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+PREFIX clc: <http://geo.linkedopendata.gr/corine/ontology#>
+
+
+
+INSERT {?h noa:isDiscarded "1"^^xsd:int}
+WHERE {
+SELECT ?h WHERE
+{
+?h noa:hasAcquisitionTime "TIMESTAMP"^^xsd:dateTime ;
+   noa:producedFromProcessingChain "PROCESSING_CHAIN"^^xsd:string ;
+   noa:isDerivedFromSensor "SENSOR"^^xsd:string ;
+   noa:hasGeometry ?hGeo.
+?a rdf:type clc:ExcludeArea;
+   clc:hasGeometry ?aGeo.
+FILTER(strdf:mbbIntersects(?hGeo,?aGeo)).
+}
+GROUP BY ?h ?hGeo
+HAVING strdf:contains(strdf:union(?aGeo),?hGeo)
+}

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/v2.3/refinePartialSeaHotspots.sparql	Wed Jul 25 01:59:00 2012 +0300
@@ -0,0 +1,35 @@
+PREFIX noa: <http://teleios.di.uoa.gr/ontologies/noaOntology.owl#>
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+PREFIX strdf: <http://strdf.di.uoa.gr/ontology#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+PREFIX gag: <http://teleios.di.uoa.gr/ontologies/gagKallikratis.rdf#>
+
+
+INSERT { ?h noa:isDiscarded "1"^^xsd:int .
+  ?valid rdf:type noa:Hotspot ;
+    noa:hasConfidence ?conf ;
+    noa:hasGeometry ?dif ;
+    gag:hasMunicipality ?muni ;
+    noa:hasAcquisitionTime "TIMESTAMP"^^xsd:dateTime ;
+    noa:isDerivedFromSensor "SENSOR"^^xsd:string ;
+    noa:hasConfirmation noa:unknown ;
+    noa:producedFromProcessingChain "PROCESSING_CHAIN"^^xsd:string ;
+    noa:isProducedBy noa:noa ;
+    noa:isDerivedFromSatellite "SAT"^^xsd:string .
+}
+WHERE {
+  SELECT ?h (strdf:intersection(?hGeo, strdf:union(?cGeo)) AS ?dif) (URI(CONCAT(STR(?h),"/refined")) AS ?valid) ?conf ?muni
+  WHERE {
+?h noa:hasAcquisitionTime "TIMESTAMP"^^xsd:dateTime;
+   noa:producedFromProcessingChain "PROCESSING_CHAIN"^^xsd:string ;
+   noa:isDerivedFromSensor "SENSOR"^^xsd:string ;
+   noa:hasGeometry ?hGeo ;
+   gag:hasMunicipality ?muni ;
+   noa:hasConfidence ?conf .
+?c rdf:type noa:Coastline ;
+   noa:hasGeometry ?cGeo .
+  FILTER(strdf:mbbIntersects(?hGeo, ?cGeo)) .
+  }
+  GROUP BY ?h ?hGeo ?conf ?muni
+  HAVING strdf:overlap(?hGeo, strdf:union(?cGeo))
+}

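Editor's note: this is where the commit's "URI representation of hotspots" shows up. The refined hotspot is minted as a new resource whose URI is the original hotspot URI with "/refined" appended, via `URI(CONCAT(STR(?h),"/refined"))`. A sketch of the resulting URI, using a hypothetical hotspot URI:

```bash
# Sketch of the refined-hotspot URI built by URI(CONCAT(STR(?h),"/refined")) above;
# the base hotspot URI is a made-up example, not one from the dataset.
h="http://teleios.di.uoa.gr/noa/hotspot/1"
valid="${h}/refined"
echo "${valid}"   # http://teleios.di.uoa.gr/noa/hotspot/1/refined
```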
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/v2.3/refineTimePersistence.sparql	Wed Jul 25 01:59:00 2012 +0300
@@ -0,0 +1,47 @@
+PREFIX noa: <http://teleios.di.uoa.gr/ontologies/noaOntology.owl#>
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+PREFIX strdf: <http://strdf.di.uoa.gr/ontology#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+PREFIX gag: <http://teleios.di.uoa.gr/ontologies/gagKallikratis.rdf#>
+
+
+INSERT {
+  ?newHotspot rdf:type noa:Hotspot ;
+    noa:hasConfidence ?hConfidence ;
+    noa:hasGeometry ?hGeometry1 ;
+    noa:hasAcquisitionTime "TIMESTAMP"^^xsd:dateTime ;
+    noa:isDerivedFromSensor "SENSOR"^^xsd:string ;
+    noa:hasConfirmation noa:unknown ;
+    noa:producedFromProcessingChain "PROCESSING_CHAIN-TimePersistence"^^xsd:string ;
+    noa:isProducedBy noa:noa ;
+    gag:hasMunicipality ?muni ;
+    noa:isDerivedFromSatellite "SAT"^^xsd:string .
+
+}
+WHERE {
+  SELECT (URI(CONCAT(STR(MAX(?H1)),"virtual/NEW_HOTSPOT")) AS ?newHotspot)
+         (SUM(?hConfidence1)/ACQUISITIONS_IN_HALF_AN_HOUR AS ?hConfidence)
+         ?hGeometry1 ?muni
+  WHERE {
+    ?H1 noa:hasConfidence ?hConfidence1 .
+    ?H1 noa:hasGeometry ?hGeometry1 .
+    ?H1 gag:hasMunicipality ?muni .
+    ?H1 noa:hasAcquisitionTime ?hAcquisitionTime1 .
+    ?H1 noa:isDerivedFromSensor "SENSOR"^^xsd:string .
+    ?H1 noa:producedFromProcessingChain "PROCESSING_CHAIN"^^xsd:string .
+    OPTIONAL { ?H1 noa:isDiscarded ?z } .
+    FILTER (!BOUND(?z)) .
+    FILTER( "MIN_ACQUISITION_TIME"^^xsd:dateTime <= ?hAcquisitionTime1 && ?hAcquisitionTime1 < "TIMESTAMP"^^xsd:dateTime ) .
+    OPTIONAL {
+      ?H2 noa:hasGeometry ?HGEO2 .
+      ?H2 noa:hasAcquisitionTime "TIMESTAMP"^^xsd:dateTime .
+      ?H2 noa:isDerivedFromSensor "SENSOR"^^xsd:string .
+      ?H2 noa:producedFromProcessingChain ?hProcessingChain2 .
+      FILTER(("PROCESSING_CHAIN"^^xsd:string = ?hProcessingChain2)||("PROCESSING_CHAIN-TimePersistence"^^xsd:string = ?hProcessingChain2)).
+      FILTER( strdf:mbbEquals(?hGeometry1, ?HGEO2) ) .
+    }
+    FILTER( !BOUND(?H2) ) .
+  }
+  GROUP BY ?hGeometry1 ?muni
+  HAVING(SUM(?hConfidence1)>0.0)
+}

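The MIN_ACQUISITION_TIME and NEW_HOTSPOT tokens (and ACQUISITIONS_IN_HALF_AN_HOUR, 3.0 or 7.0 depending on the sensor) are derived from the acquisition timestamp by the v2.3 chain script; see runNoaRefinementChain.sh below. A sketch of the two derived values, using a hypothetical acquisition time:

```bash
# Sketch of the derived values substituted into this template (example acquisition time;
# output shown assumes an EEST local timezone).
year=2012; month=07; day=25; time2="01:45"
min_acquisition_time=`date --date="${year}-${month}-${day} ${time2}:00 EEST -30 minutes" +%Y-%m-%dT%H:%M:00`
newHotspotTimestamp=`date --date="${year}-${month}-${day} ${time2}:00" +%y%m%d_%H%M`
echo "${min_acquisition_time} ${newHotspotTimestamp}"   # 2012-07-25T01:15:00 120725_0145
```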
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/v2.3/runNoaRefinementChain-papos.sh	Wed Jul 25 01:59:00 2012 +0300
@@ -0,0 +1,255 @@
+#!/bin/bash
+LOC="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+############################ CONFIGURATION #############################
+ENDPOINT="http://localhost:8080/endpoint"
+DB="endpoint"
+
+HOTSPOTS_URL="http://localhost/rdf"
+ URLDIR=( "msg1_rss" "msg2")
+ URLPREFIX=( "HMSG1_RSS_IR_039_s7_" "HMSG2_IR_039_s7_")
+ SENSOR=( "MSG1_RSS" "MSG2")
+ SATELITE=( "METEOSAT8" "METEOSAT9")
+ACQ_HALF_HOUR=( "7.0" "3.0")
+SUFFIX=".hotspots.n3"
+PROCESSING_CHAIN="DynamicThresholds"
+
+
+# log files
+logFile="chain.log"
+timings="chain-times.log"
+timingsDiscover="discover.txt"
+
+# stSPARQL statements
+insertMunicipalities=`cat ${LOC}/insertMunicipalities.sparql`
+deleteSeaHotspots=`cat ${LOC}/deleteSeaHotspots.sparql`
+invalidForFires=`cat ${LOC}/landUseInvalidForFires.sparql`
+refinePartialSeaHotspots=`cat ${LOC}/refinePartialSeaHotspots.sparql`
+refineTimePersistence=`cat ${LOC}/refineTimePersistence.sparql`
+discover=`cat ${LOC}/discover.sparql`
+# | sed 's/\"/\\\"/g'`
+########################################################################
+
+function timer()
+{
+  if [[ $# -eq 0 ]]; then
+    t=$(date '+%s%N')
+    t=$((t/1000000))
+    echo $t
+  else
+    local stime=$1
+    etime=$(date '+%s%N')
+    etime=$((etime/1000000))
+
+    if [[ -z "$stime" ]]; then stime=$etime; fi
+    dt=$((etime - stime)) #dt in milliseconds
+    dM=$((dt%1000))
+    Dt=$((dt/1000)) #delta t in seconds
+    ds=$((Dt % 60))
+    dm=$(((Dt / 60) % 60))
+    dh=$((Dt / 3600))
+    printf '%d:%02d:%02d.%03d' $dh $dm $ds $dM
+  fi
+}
+
+# find out the postgres service to use
+postgres=$(ls -1 /etc/init.d/| grep postgres | head -1)
+
+tomcat=
+function chooseTomcat()
+{
+  if test -s /etc/fedora-release ; then
+    tomcat="tomcat"
+  #elif test -s /etc/centos-release ; then
+  #elif test -s /etc/yellowdog-release ; then
+  #elif test -s /etc/redhat-release ; then
+  #elif test -s /etc/SuSE-release ; then
+  #elif test -s /etc/gentoo-release ; then
+  elif test -s /etc/lsb-release ; then # Ubuntu
+    if test -s /etc/init.d/tomcat6 ; then
+      tomcat="tomcat6"
+    elif test -s /etc/init.d/tomcat7 ; then
+      tomcat="tomcat7"
+    fi
+  elif test -s /etc/debian_version ; then
+    tomcat="tomcat"
+  fi
+
+  # check for service availability
+  if ! test -e "/etc/init.d/${tomcat}"; then
+    tomcat=
+  fi
+}
+
+# Initialize (stop tomcat, restart postgres, drop/create database, start tomcat)
+chooseTomcat
+echo "stopping tomcat"
+if test -z "${tomcat}"; then
+  # work-around for babis (standalone tomcat, with start_tomcat.sh and stop_tomcat.sh scripts)
+  stop_tomcat.sh
+else
+  sudo service ${tomcat} stop
+fi
+
+sudo service ${postgres} restart
+
+# get the main version of postgres
+POSTGRES_MAIN_VERSION=$(sudo service ${postgres} status | grep -o '.\..' | cut -b 1)
+
+echo "Dropping endpoint database";
+dropdb -U postgres ${DB}
+
+echo "Creating endpoint database"
+createdb -U postgres ${DB}
+
+# load background data
+echo "initializing database"
+curl -s http://dev.strabon.di.uoa.gr/rdf/Kallikratis-Coastline-Corine-dump-postgres-${POSTGRES_MAIN_VERSION}.tgz | tar xz -O | psql -U postgres -d ${DB}
+psql ${DB} -U postgres -c 'VACUUM ANALYZE '
+
+echo "starting tomcat"
+if test -z "${tomcat}"; then
+  # work-around for babis (standalone tomcat, with start_tomcat.sh and stop_tomcat.sh scripts)
+  start_tomcat.sh
+else
+  sudo service ${tomcat} start
+fi
+
+# the chain :)
+echo "Store Municipalities DeleteInSea InvalidForFires RefineInCoast TimePersistence" > ${timings}
+for (( i = 0 ; i < ${#URLDIR[@]} ; i++ )) do
+  dir=${URLDIR[$i]}
+  prefix=${URLPREFIX[$i]}
+  sensor=${SENSOR[$i]}
+  satelite=${SATELITE[$i]}
+  acquisitions=${ACQ_HALF_HOUR[$i]}
+  # get hotpost URLS
+  for hot in $(curl -s ${HOTSPOTS_URL}/${dir}/ | grep -o ">${prefix}.*\.n3" | colrm 1 1); do
+    echo $hot
+    file="${HOTSPOTS_URL}/${dir}/${hot}"
+    echo $file
+
+    # get time information for acquisition
+    offset=$(( ${#prefix} + 1 ))
+    year=$(expr substr ${hot} ${offset} 2)
+    month=$(expr substr ${hot} $(( ${offset} + 2 )) 2)
+    day=$(expr substr ${hot} $(( ${offset} + 4 )) 2)
+    t1=$(expr substr ${hot} $(( ${offset} + 7 )) 2)
+    t2=$(expr substr ${hot} $(( ${offset} + 9 )) 2)
+    time2="${t1}:${t2}"
+
+    printf "$hot " >> ${timings}
+
+    # store file
+    echo -n "storing " $file; echo; echo;
+    # ${countTime} ./strabon -db endpoint store $file
+
+    tmr1=$(timer)
+    ../endpoint store ${ENDPOINT} N-Triples -u ${file}
+    tmr2=$(timer)
+    printf '%s ' $((tmr2-tmr1)) >> ${timings}
+
+    # sudo -u postgres psql -d endpoint -c 'VACUUM ANALYZE;';
+
+    echo;echo;echo;echo "File ${file} stored!" >> ${logFile}
+
+    # insertMunicipalities
+    echo -n "inserting Municipalities " ;echo; echo; echo;
+    # query=`echo "${insertMunicipalities}" `
+    # ${countTime} ./strabon -db endpoint update "${query}"
+
+    tmr1=$(timer)
+
+    query=`echo "${insertMunicipalities}" | sed "s/TIMESTAMP/20${year}-${month}-${day}T${time2}:00/g" | \
+      sed "s/PROCESSING_CHAIN/${PROCESSING_CHAIN}/g" | \
+      sed "s/SENSOR/${sensor}/g"`
+
+    ../endpoint update ${ENDPOINT} "${query}"
+
+    tmr2=$(timer)
+    printf '%s ' $((tmr2-tmr1)) >> ${timings}
+    echo;echo;echo;echo "File ${file} inserted Municipalities!"
+
+    # deleteSeaHotspots
+    echo -n "Going to deleteSeaHotspots 20${year}-${month}-${day}T${time2}:00 " ;echo; echo; echo;
+    query=`echo "${deleteSeaHotspots}" | sed "s/TIMESTAMP/20${year}-${month}-${day}T${time2}:00/g" | \
+      sed "s/PROCESSING_CHAIN/${PROCESSING_CHAIN}/g" | \
+      sed "s/SENSOR/${sensor}/g"`
+    # ${countTime} ./strabon -db endpoint update "${query}"
+
+    tmr1=$(timer)
+    ../endpoint update ${ENDPOINT} "${query}"
+
+    tmr2=$(timer)
+    printf '%s ' $((tmr2-tmr1)) >> ${timings}
+    echo;echo;echo;echo "File ${file} deleteSeaHotspots done!"
+
+    # echo "Continue?"
+    # read a
+    # invalidForFires
+    echo -n "invalidForFires 20${year}-${month}-${day}T${time2}:00 " ; echo; echo ; echo;
+    query=`echo "${invalidForFires}" | sed "s/TIMESTAMP/20${year}-${month}-${day}T${time2}:00/g" | \
+      sed "s/PROCESSING_CHAIN/${PROCESSING_CHAIN}/g" | \
+      sed "s/SENSOR/${sensor}/g" |\
+      sed "s/SAT/${satelite}/g"`
+    # ${countTime} ./strabon -db endpoint update "${query}"
+    tmr1=$(timer)
+    ../endpoint update ${ENDPOINT} "${query}"
+    tmr2=$(timer)
+    printf '%s ' $((tmr2-tmr1)) >> ${timings}
+    echo "File ${file} invalidForFires done!"
+
+    # refinePartialSeaHotspots
+    echo -n "refinePartialSeaHotspots 20${year}-${month}-${day}T${time2}:00 " ; echo; echo ; echo;
+    query=`echo "${refinePartialSeaHotspots}" | sed "s/TIMESTAMP/20${year}-${month}-${day}T${time2}:00/g" | \
+      sed "s/PROCESSING_CHAIN/${PROCESSING_CHAIN}/g" | \
+      sed "s/SENSOR/${sensor}/g" |\
+      sed "s/SAT/${satelite}/g"`
+    # ${countTime} ./strabon -db endpoint update "${query}"
+    tmr1=$(timer)
+    ../endpoint update ${ENDPOINT} "${query}"
+    tmr2=$(timer)
+    printf '%s ' $((tmr2-tmr1)) >> ${timings}
+
+    echo "File ${file} refinePartialSeaHotspots done!"
+    # echo "Continue?"
+    # read a
+
+    # refineTimePersistence
+    echo -n "Going to refineTimePersistence 20${year}-${month}-${day}T${time2}:00 ";echo;echo;echo;
+    min_acquisition_time=`date --date="20${year}-${month}-${day} ${time2}:00 EEST -30 minutes" +%Y-%m-%dT%H:%m:00`
+    query=`echo "${refineTimePersistence}" | sed "s/TIMESTAMP/20${year}-${month}-${day}T${time2}:00/g" | \
+      sed "s/PROCESSING_CHAIN/${PROCESSING_CHAIN}/g" | \
+      sed "s/SENSOR/${sensor}/g" | \
+      sed "s/ACQUISITIONS_IN_HALF_AN_HOUR/${acquisitions}/g" | \
+      sed "s/MIN_ACQUISITION_TIME/${min_acquisition_time}/g" |\
+      sed "s/SAT/${satelite}/g"`
+
+    #sudo -u postgres psql -d ${DB} -c 'VACUUM ANALYZE;';
+
+    tmr1=$(timer)
+    ../endpoint update ${ENDPOINT} "${query}"
+    tmr2=$(timer)
+    printf '%s \n' $((tmr2-tmr1)) >> ${timings}
+    echo;echo;echo;echo "File ${file} timePersistence done!"
+    # echo "Continue?"
+    # read a
+
+
+    # discover
+    echo -n "Going to discover 20${year}-${month}-${day}T${time2}:00 ";echo;echo;echo;
+    min_acquisition_time=`date --date="20${year}-${month}-${day} 00:00 EEST" +%Y-%m-%dT%H:%m:00`
+    max_acquisition_time=`date --date="20${year}-${month}-${day} 23:59 EEST" +%Y-%m-%dT%H:%m:00`
+    query=`echo "${discover}" | \
+      sed "s/PROCESSING_CHAIN/${PROCESSING_CHAIN}/g" | \
+      sed "s/SENSOR/${sensor}/g" | \
+      sed "s/MIN_ACQUISITION_TIME/${min_acquisition_time}/g" |\
+      sed "s/MAX_ACQUISITION_TIME/${max_acquisition_time}/g"`
+
+    tmr1=$(timer)
+    ../endpoint query ${ENDPOINT} "${query}"
+    tmr2=$(timer)
+    printf '%s \n' $((tmr2-tmr1)) >> ${timingsDiscover}
+    echo;echo;echo;echo "Discovered hotspots done!"
+  done
+done

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/v2.3/runNoaRefinementChain.sh	Wed Jul 25 01:59:00 2012 +0300
@@ -0,0 +1,265 @@
+#!/bin/bash
+LOC="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+ENDPOINT="http://localhost:8080/endpoint"
+DB="endpoint"
+
+#HOTSPOTS_URL="http://jose.di.uoa.gr/rdf/hotspots/MSG2"
+HOTSPOTS_URL="http://jose.di.uoa.gr/rdf/hotspots/MSG1"
+
+logFile="chain.log"
+
+function timer()
+{
+  if [[ $# -eq 0 ]]; then
+    t=$(date '+%s%N')
+    t=$((t/1000000))
+    echo $t
+  else
+    local stime=$1
+    etime=$(date '+%s%N')
+    etime=$((etime/1000000))
+
+    if [[ -z "$stime" ]]; then stime=$etime; fi
+    dt=$((etime - stime)) #dt in milliseconds
+    dM=$((dt%1000))
+    Dt=$((dt/1000)) #delta t in seconds
+    ds=$((Dt % 60))
+    dm=$(((Dt / 60) % 60))
+    dh=$((Dt / 3600))
+    printf '%d:%02d:%02d.%03d' $dh $dm $ds $dM
+  fi
+}
+
+# find out the postgres service to use
+postgres=$(ls -1 /etc/init.d/| grep postgres | head -1)
+
+tomcat=
+function chooseTomcat()
+{
+  if test -s /etc/fedora-release ; then
+    tomcat="tomcat"
+  #elif test -s /etc/centos-release ; then
+  #elif test -s /etc/yellowdog-release ; then
+  #elif test -s /etc/redhat-release ; then
+  #elif test -s /etc/SuSE-release ; then
+  #elif test -s /etc/gentoo-release ; then
+  elif test -s /etc/lsb-release ; then # Ubuntu
+    tomcat=$(ls -1 /etc/init.d/| grep tomcat | head -1)
+  elif test -s /etc/debian_version ; then
+    tomcat="tomcat"
+  fi
+
+  # check for service availability
+  if ! test -e "/etc/init.d/${tomcat}"; then
+    tomcat=
+  fi
+}
+
+insertMunicipalities=`cat ${LOC}/insertMunicipalities.sparql`
+deleteSeaHotspots=`cat ${LOC}/deleteSeaHotspots.sparql` # | sed 's/\"/\\\"/g'`
+refinePartialSeaHotspots=`cat ${LOC}/refinePartialSeaHotspots.sparql` # | sed 's/\"/\\\"/g'`
+refineTimePersistence=`cat ${LOC}/refineTimePersistence.sparql` # | sed 's/\"/\\\"/g'`
+invalidForFires=`cat ${LOC}/landUseInvalidForFires.sparql`
+discover=`cat ${LOC}/discover.sparql`
+#InsertMunicipalities =`cat ${LOC}/InsertMunicipalities.sparql` # | sed 's/\"/\\\"/g'`
+
+# Initialize (stop tomcat, restart postgres, drop/create database, start tomcat)
+chooseTomcat
+echo "stopping tomcat"
+if test -z "${tomcat}"; then
+  # work-around for babis (standalone tomcat, with start_tomcat.sh and stop_tomcat.sh scripts)
+  stop_tomcat.sh
+else
+  sudo service ${tomcat} stop
+fi
+
+sudo service ${postgres} restart
+
+# get the main version of postgres
+POSTGRES_MAIN_VERSION=$(sudo service ${postgres} status | grep -o '.\..' | cut -b 1)
+
+echo "Dropping endpoint database";
+dropdb ${DB}
+
+echo "Creating endpoint database"
+createdb ${DB}
+
+# load data
+curl -s http://dev.strabon.di.uoa.gr/rdf/Kallikratis-Coastline-Corine-dump-postgres-${POSTGRES_MAIN_VERSION}.tgz | tar xz -O | psql -d ${DB}
+psql ${DB} -c 'VACUUM ANALYZE'
+
+echo "starting tomcat"
+if test -z "${tomcat}"; then
+  # work-around for babis (standalone tomcat, with start_tomcat.sh and stop_tomcat.sh scripts)
+  start_tomcat.sh
+  sleep 2
+else
+  sudo service ${tomcat} start
+fi
+
+echo "initializing database"
+echo "Timestamp Store Municipalities DeleteInSea InvalidForFires RefineInCoast TimePersistence" > stderr.txt
+
+
+#./scripts/endpoint query ${ENDPOINT} "SELECT (COUNT(*) AS ?C) WHERE {?s ?p ?o}"
+#sudo -u postgres psql -d endpoint -c 'CREATE INDEX datetime_values_idx_value ON datetime_values USING btree(value)';
+#sudo -u postgres psql -d endpoint -c 'VACUUM ANALYZE;';
+
+#for y in 2007 2008 2010 2011 ;do
+for y in 2012; do
+  # get hotpost URLS
+  for hot in $(curl -s ${HOTSPOTS_URL}/${y}/ | grep -o '>HMSG.*\.nt' | colrm 1 1); do
+    file="${HOTSPOTS_URL}/${y}/${hot}"
+
+    time_status=$(echo ${hot} | egrep -o '[[:digit:]]{6}_[[:digit:]]{4}')
+
+    # get sensor
+    SENSOR=$(echo ${hot} | grep -o 'MSG.')
+
+    # get satellite
+    if test "${SENSOR}" = "MSG2"; then
+      SAT="METEOSAT9"
+    else
+      SAT="METEOSAT8"
+      SENSOR="MSG1_RSS"
+    fi
+
+    # get time information for acquisition and construct timestamp
+    year="20$(expr substr ${time_status} 1 2)"
+    month=$(expr substr ${time_status} 3 2)
+    day=$(expr substr ${time_status} 5 2)
+    time2=$(expr substr ${time_status} 8 2)
+    time2="${time2}:$(expr substr ${time_status} 10 2)"
+
+    # construct timestamp
+    TIMESTAMP="${year}-${month}-${day}T${time2}:00"
+
+    # store file
+    echo -n "storing " $file; echo; echo;
+    # ${countTime} ./strabon -db endpoint store $file
+
+    # print timestamp
+    echo -n "${TIMESTAMP} " >> stderr.txt
+
+    tmr1=$(timer)
+    ../endpoint store ${ENDPOINT} N-Triples -u ${file}
+    tmr2=$(timer)
+    printf '%s ' $((tmr2-tmr1)) >> stderr.txt
+
+    # sudo -u postgres psql -d endpoint -c 'VACUUM ANALYZE;';
+
+    echo;echo;echo;echo "File ${file} stored!" >> ${logFile}
+
+    # insertMunicipalities
+    echo -n "inserting Municipalities " ;echo; echo; echo;
+    # query=`echo "${insertMunicipalities}" `
+    # ${countTime} ./strabon -db endpoint update "${query}"
+
+    tmr1=$(timer)
+
+    query=`echo "${insertMunicipalities}" | sed "s/TIMESTAMP/${year}-${month}-${day}T${time2}:00/g" | \
+      sed "s/PROCESSING_CHAIN/DynamicThresholds/g" | \
+      sed "s/SENSOR/${SENSOR}/g"`
+
+    ../endpoint update ${ENDPOINT} "${query}"
+
+    tmr2=$(timer)
+    printf '%s ' $((tmr2-tmr1)) >>stderr.txt
+    echo;echo;echo;echo "File ${file} inserted Municipalities!"
+
+    # execute an explicit VACUUM ANALYZE when a query takes longer than it should
+    duration=$((tmr2-tmr1))
+    if test ${duration} -ge 30000; then
+      psql ${DB} -c 'VACUUM ANALYZE'
+      echo "Explicit VACUUM ANALYZE"
+    fi
+
+    # deleteSeaHotspots
+    echo -n "Going to deleteSeaHotspots ${year}-${month}-${day}T${time2}:00 " ;echo; echo; echo;
+    query=`echo "${deleteSeaHotspots}" | sed "s/TIMESTAMP/${year}-${month}-${day}T${time2}:00/g" | \
+      sed "s/PROCESSING_CHAIN/DynamicThresholds/g" | \
+      sed "s/SENSOR/${SENSOR}/g"`
+    # ${countTime} ./strabon -db endpoint update "${query}"
+
+    tmr1=$(timer)
+    ../endpoint update ${ENDPOINT} "${query}"
+
+    tmr2=$(timer)
+    printf '%s ' $((tmr2-tmr1)) >>stderr.txt
+    echo;echo;echo;echo "File ${file} deleteSeaHotspots done!"
+
+    # echo "Continue?"
+    # read a
+    # invalidForFires
+    echo -n "invalidForFires ${year}-${month}-${day}T${time2}:00 " ; echo; echo ; echo;
+    query=`echo "${invalidForFires}" | sed "s/TIMESTAMP/${year}-${month}-${day}T${time2}:00/g" | \
+      sed "s/PROCESSING_CHAIN/DynamicThresholds/g" | \
+      sed "s/SENSOR/${SENSOR}/g" |\
+      sed "s/SAT/${SAT}/g"`
+    # ${countTime} ./strabon -db endpoint update "${query}"
+    tmr1=$(timer)
+    ../endpoint update ${ENDPOINT} "${query}"
+    tmr2=$(timer)
+    printf '%s ' $((tmr2-tmr1)) >>stderr.txt
+    echo "File ${file} invalidForFires done!"
+
+    # refinePartialSeaHotspots
+    echo -n "refinePartialSeaHotspots ${year}-${month}-${day}T${time2}:00 " ; echo; echo ; echo;
+    query=`echo "${refinePartialSeaHotspots}" | sed "s/TIMESTAMP/${year}-${month}-${day}T${time2}:00/g" | \
+      sed "s/PROCESSING_CHAIN/DynamicThresholds/g" | \
+      sed "s/SENSOR/${SENSOR}/g" |\
+      sed "s/SAT/${SAT}/g"`
+    # ${countTime} ./strabon -db endpoint update "${query}"
+    tmr1=$(timer)
+    ../endpoint update ${ENDPOINT} "${query}"
+    tmr2=$(timer)
+    printf '%s ' $((tmr2-tmr1)) >>stderr.txt
+
+    echo "File ${file} refinePartialSeaHotspots done!"
+    # echo "Continue?"
+    # read a
+
+    # refineTimePersistence
+    echo -n "Going to refineTimePersistence ${year}-${month}-${day}T${time2}:00 ";echo;echo;echo;
+    min_acquisition_time=`date --date="${year}-${month}-${day} ${time2}:00 EEST -30 minutes" +%Y-%m-%dT%H:%M:00`
+    newHotspotTimestamp=`date --date="${year}-${month}-${day} ${time2}:00" +%y%m%d_%H%M`
+
+    query=`echo "${refineTimePersistence}" | sed "s/TIMESTAMP/${year}-${month}-${day}T${time2}:00/g" | \
+      sed "s/PROCESSING_CHAIN/DynamicThresholds/g" | \
+      sed "s/SENSOR/${SENSOR}/g" | \
+      sed "s/ACQUISITIONS_IN_HALF_AN_HOUR/3.0/g" | \
+      sed "s/MIN_ACQUISITION_TIME/${min_acquisition_time}/g" |\
+      sed "s/SAT/${SAT}/g" | \
+      sed "s/NEW_HOTSPOT/${newHotspotTimestamp}/g"`
+
+    #sudo -u postgres psql -d ${DB} -c 'VACUUM ANALYZE;';
+
+    tmr1=$(timer)
+    ../endpoint update ${ENDPOINT} "${query}"
+    tmr2=$(timer)
+    printf '%s \n' $((tmr2-tmr1)) >>stderr.txt
+    echo;echo;echo;echo "File ${file} timePersistence done!"
+    # echo "Continue?"
+    # read a
+
+
+    # discover
+    echo -n "Going to discover ${year}-${month}-${day}T${time2}:00 ";echo;echo;echo;
+    min_acquisition_time=`date --date="${year}-${month}-${day} 00:00 EEST" +%Y-%m-%dT%H:%M:00`
+    max_acquisition_time=`date --date="${year}-${month}-${day} 23:59 EEST" +%Y-%m-%dT%H:%M:00`
+    query=`echo "${discover}" | \
+      sed "s/PROCESSING_CHAIN/DynamicThresholds/g" | \
+      sed "s/SENSOR/${SENSOR}/g" | \
+      sed "s/MIN_ACQUISITION_TIME/${min_acquisition_time}/g" |\
+      sed "s/MAX_ACQUISITION_TIME/${max_acquisition_time}/g"`
+
+    tmr1=$(timer)
+    ../endpoint query ${ENDPOINT} "${query}"
+    tmr2=$(timer)
+    printf '%s \n' $((tmr2-tmr1)) >>discover.txt
+    echo;echo;echo;echo "Discovered hotspots done!"
+
+  done
+done
+