diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/404.html b/404.html new file mode 100644 index 00000000..c96aaf14 --- /dev/null +++ b/404.html @@ -0,0 +1,447 @@ + + + + + + + + + + + + + + + + + + DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ +

404 - Not found

+ +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/DSM2/index.html b/DSM2/index.html new file mode 100644 index 00000000..1024db31 --- /dev/null +++ b/DSM2/index.html @@ -0,0 +1,509 @@ + + + + + + + + + + + + + + + + + + DSM2 - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2

+

Search this documentation

+

Search

+

General Description

+

DSM2 is a river, estuary, and land modeling system.

+
    +
  • River - Can simulate riverine systems, and has been extended from + Sacramento to Shasta Dam. Also has been tested with high flow/stage + simulations for flood modeling.
  • +
  • Estuary - Completely flexible estuary model; stages and flows may + be specified at boundary and internal points.
  • +
  • Land - Includes effects from land-based processes, such as + consumptive use and agricultural runoff.
  • +
+

DSM2 can calculate stages, flows, velocities; many mass transport +processes, including salts, multiple non-conservative constituents, +temperature, THM formation potential and individual particles.

+

The model is copyrighted by the State of California, Department of Water +Resources. It is licensed under the GNU General Public License, version +2. This means it can be copied, distributed, and modified freely, but +you may not restrict others in their ability to copy, distribute, and +modify it. See the License for more details. Also notice the list of protected routines.

+

Background and Reference Information

+

DSM2 Model Documentation

+

DSM2 Calibration Memo

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/Delta_DSM2_Documentation/index.html b/Delta_DSM2_Documentation/index.html new file mode 100644 index 00000000..66f47763 --- /dev/null +++ b/Delta_DSM2_Documentation/index.html @@ -0,0 +1,473 @@ + + + + + + + + + + + + + + + + + + Delta DSM2 Documentation - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/Table_of_Contents/index.html b/Table_of_Contents/index.html new file mode 100644 index 00000000..3a61ef5d --- /dev/null +++ b/Table_of_Contents/index.html @@ -0,0 +1,515 @@ + + + + + + + + + + + + + + + + + + Table of Contents - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + + + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/assets/images/favicon.png b/assets/images/favicon.png new file mode 100644 index 00000000..1cf13b9f Binary files /dev/null and b/assets/images/favicon.png differ diff --git a/assets/javascripts/bundle.51d95adb.min.js b/assets/javascripts/bundle.51d95adb.min.js new file mode 100644 index 00000000..b20ec683 --- /dev/null +++ b/assets/javascripts/bundle.51d95adb.min.js @@ -0,0 +1,29 @@ +"use strict";(()=>{var Hi=Object.create;var xr=Object.defineProperty;var Pi=Object.getOwnPropertyDescriptor;var $i=Object.getOwnPropertyNames,kt=Object.getOwnPropertySymbols,Ii=Object.getPrototypeOf,Er=Object.prototype.hasOwnProperty,an=Object.prototype.propertyIsEnumerable;var on=(e,t,r)=>t in e?xr(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,P=(e,t)=>{for(var r in t||(t={}))Er.call(t,r)&&on(e,r,t[r]);if(kt)for(var r of kt(t))an.call(t,r)&&on(e,r,t[r]);return e};var sn=(e,t)=>{var r={};for(var n in e)Er.call(e,n)&&t.indexOf(n)<0&&(r[n]=e[n]);if(e!=null&&kt)for(var n of kt(e))t.indexOf(n)<0&&an.call(e,n)&&(r[n]=e[n]);return r};var Ht=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var Fi=(e,t,r,n)=>{if(t&&typeof t=="object"||typeof t=="function")for(let o of $i(t))!Er.call(e,o)&&o!==r&&xr(e,o,{get:()=>t[o],enumerable:!(n=Pi(t,o))||n.enumerable});return e};var yt=(e,t,r)=>(r=e!=null?Hi(Ii(e)):{},Fi(t||!e||!e.__esModule?xr(r,"default",{value:e,enumerable:!0}):r,e));var fn=Ht((wr,cn)=>{(function(e,t){typeof wr=="object"&&typeof cn!="undefined"?t():typeof define=="function"&&define.amd?define(t):t()})(wr,function(){"use strict";function e(r){var n=!0,o=!1,i=null,a={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function s(T){return!!(T&&T!==document&&T.nodeName!=="HTML"&&T.nodeName!=="BODY"&&"classList"in T&&"contains"in T.classList)}function f(T){var 
Ke=T.type,We=T.tagName;return!!(We==="INPUT"&&a[Ke]&&!T.readOnly||We==="TEXTAREA"&&!T.readOnly||T.isContentEditable)}function c(T){T.classList.contains("focus-visible")||(T.classList.add("focus-visible"),T.setAttribute("data-focus-visible-added",""))}function u(T){T.hasAttribute("data-focus-visible-added")&&(T.classList.remove("focus-visible"),T.removeAttribute("data-focus-visible-added"))}function p(T){T.metaKey||T.altKey||T.ctrlKey||(s(r.activeElement)&&c(r.activeElement),n=!0)}function m(T){n=!1}function d(T){s(T.target)&&(n||f(T.target))&&c(T.target)}function h(T){s(T.target)&&(T.target.classList.contains("focus-visible")||T.target.hasAttribute("data-focus-visible-added"))&&(o=!0,window.clearTimeout(i),i=window.setTimeout(function(){o=!1},100),u(T.target))}function v(T){document.visibilityState==="hidden"&&(o&&(n=!0),B())}function B(){document.addEventListener("mousemove",z),document.addEventListener("mousedown",z),document.addEventListener("mouseup",z),document.addEventListener("pointermove",z),document.addEventListener("pointerdown",z),document.addEventListener("pointerup",z),document.addEventListener("touchmove",z),document.addEventListener("touchstart",z),document.addEventListener("touchend",z)}function re(){document.removeEventListener("mousemove",z),document.removeEventListener("mousedown",z),document.removeEventListener("mouseup",z),document.removeEventListener("pointermove",z),document.removeEventListener("pointerdown",z),document.removeEventListener("pointerup",z),document.removeEventListener("touchmove",z),document.removeEventListener("touchstart",z),document.removeEventListener("touchend",z)}function 
z(T){T.target.nodeName&&T.target.nodeName.toLowerCase()==="html"||(n=!1,re())}document.addEventListener("keydown",p,!0),document.addEventListener("mousedown",m,!0),document.addEventListener("pointerdown",m,!0),document.addEventListener("touchstart",m,!0),document.addEventListener("visibilitychange",v,!0),B(),r.addEventListener("focus",d,!0),r.addEventListener("blur",h,!0),r.nodeType===Node.DOCUMENT_FRAGMENT_NODE&&r.host?r.host.setAttribute("data-js-focus-visible",""):r.nodeType===Node.DOCUMENT_NODE&&(document.documentElement.classList.add("js-focus-visible"),document.documentElement.setAttribute("data-js-focus-visible",""))}if(typeof window!="undefined"&&typeof document!="undefined"){window.applyFocusVisiblePolyfill=e;var t;try{t=new CustomEvent("focus-visible-polyfill-ready")}catch(r){t=document.createEvent("CustomEvent"),t.initCustomEvent("focus-visible-polyfill-ready",!1,!1,{})}window.dispatchEvent(t)}typeof document!="undefined"&&e(document)})});var un=Ht(Sr=>{(function(e){var t=function(){try{return!!Symbol.iterator}catch(c){return!1}},r=t(),n=function(c){var u={next:function(){var p=c.shift();return{done:p===void 0,value:p}}};return r&&(u[Symbol.iterator]=function(){return u}),u},o=function(c){return encodeURIComponent(c).replace(/%20/g,"+")},i=function(c){return decodeURIComponent(String(c).replace(/\+/g," "))},a=function(){var c=function(p){Object.defineProperty(this,"_entries",{writable:!0,value:{}});var m=typeof p;if(m!=="undefined")if(m==="string")p!==""&&this._fromString(p);else if(p instanceof c){var d=this;p.forEach(function(re,z){d.append(z,re)})}else if(p!==null&&m==="object")if(Object.prototype.toString.call(p)==="[object Array]")for(var h=0;hd[0]?1:0}),c._entries&&(c._entries={});for(var p=0;p1?i(d[1]):"")}})})(typeof global!="undefined"?global:typeof window!="undefined"?window:typeof self!="undefined"?self:Sr);(function(e){var t=function(){try{var o=new e.URL("b","http://a");return o.pathname="c 
d",o.href==="http://a/c%20d"&&o.searchParams}catch(i){return!1}},r=function(){var o=e.URL,i=function(f,c){typeof f!="string"&&(f=String(f)),c&&typeof c!="string"&&(c=String(c));var u=document,p;if(c&&(e.location===void 0||c!==e.location.href)){c=c.toLowerCase(),u=document.implementation.createHTMLDocument(""),p=u.createElement("base"),p.href=c,u.head.appendChild(p);try{if(p.href.indexOf(c)!==0)throw new Error(p.href)}catch(T){throw new Error("URL unable to set base "+c+" due to "+T)}}var m=u.createElement("a");m.href=f,p&&(u.body.appendChild(m),m.href=m.href);var d=u.createElement("input");if(d.type="url",d.value=f,m.protocol===":"||!/:/.test(m.href)||!d.checkValidity()&&!c)throw new TypeError("Invalid URL");Object.defineProperty(this,"_anchorElement",{value:m});var h=new e.URLSearchParams(this.search),v=!0,B=!0,re=this;["append","delete","set"].forEach(function(T){var Ke=h[T];h[T]=function(){Ke.apply(h,arguments),v&&(B=!1,re.search=h.toString(),B=!0)}}),Object.defineProperty(this,"searchParams",{value:h,enumerable:!0});var z=void 0;Object.defineProperty(this,"_updateSearchParams",{enumerable:!1,configurable:!1,writable:!1,value:function(){this.search!==z&&(z=this.search,B&&(v=!1,this.searchParams._fromString(this.search),v=!0))}})},a=i.prototype,s=function(f){Object.defineProperty(a,f,{get:function(){return this._anchorElement[f]},set:function(c){this._anchorElement[f]=c},enumerable:!0})};["hash","host","hostname","port","protocol"].forEach(function(f){s(f)}),Object.defineProperty(a,"search",{get:function(){return this._anchorElement.search},set:function(f){this._anchorElement.search=f,this._updateSearchParams()},enumerable:!0}),Object.defineProperties(a,{toString:{get:function(){var f=this;return function(){return f.href}}},href:{get:function(){return this._anchorElement.href.replace(/\?$/,"")},set:function(f){this._anchorElement.href=f,this._updateSearchParams()},enumerable:!0},pathname:{get:function(){return 
this._anchorElement.pathname.replace(/(^\/?)/,"/")},set:function(f){this._anchorElement.pathname=f},enumerable:!0},origin:{get:function(){var f={"http:":80,"https:":443,"ftp:":21}[this._anchorElement.protocol],c=this._anchorElement.port!=f&&this._anchorElement.port!=="";return this._anchorElement.protocol+"//"+this._anchorElement.hostname+(c?":"+this._anchorElement.port:"")},enumerable:!0},password:{get:function(){return""},set:function(f){},enumerable:!0},username:{get:function(){return""},set:function(f){},enumerable:!0}}),i.createObjectURL=function(f){return o.createObjectURL.apply(o,arguments)},i.revokeObjectURL=function(f){return o.revokeObjectURL.apply(o,arguments)},e.URL=i};if(t()||r(),e.location!==void 0&&!("origin"in e.location)){var n=function(){return e.location.protocol+"//"+e.location.hostname+(e.location.port?":"+e.location.port:"")};try{Object.defineProperty(e.location,"origin",{get:n,enumerable:!0})}catch(o){setInterval(function(){e.location.origin=n()},100)}}})(typeof global!="undefined"?global:typeof window!="undefined"?window:typeof self!="undefined"?self:Sr)});var Qr=Ht((Lt,Kr)=>{/*! 
+ * clipboard.js v2.0.11 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */(function(t,r){typeof Lt=="object"&&typeof Kr=="object"?Kr.exports=r():typeof define=="function"&&define.amd?define([],r):typeof Lt=="object"?Lt.ClipboardJS=r():t.ClipboardJS=r()})(Lt,function(){return function(){var e={686:function(n,o,i){"use strict";i.d(o,{default:function(){return ki}});var a=i(279),s=i.n(a),f=i(370),c=i.n(f),u=i(817),p=i.n(u);function m(j){try{return document.execCommand(j)}catch(O){return!1}}var d=function(O){var w=p()(O);return m("cut"),w},h=d;function v(j){var O=document.documentElement.getAttribute("dir")==="rtl",w=document.createElement("textarea");w.style.fontSize="12pt",w.style.border="0",w.style.padding="0",w.style.margin="0",w.style.position="absolute",w.style[O?"right":"left"]="-9999px";var k=window.pageYOffset||document.documentElement.scrollTop;return w.style.top="".concat(k,"px"),w.setAttribute("readonly",""),w.value=j,w}var B=function(O,w){var k=v(O);w.container.appendChild(k);var F=p()(k);return m("copy"),k.remove(),F},re=function(O){var w=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body},k="";return typeof O=="string"?k=B(O,w):O instanceof HTMLInputElement&&!["text","search","url","tel","password"].includes(O==null?void 0:O.type)?k=B(O.value,w):(k=p()(O),m("copy")),k},z=re;function T(j){return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?T=function(w){return typeof w}:T=function(w){return w&&typeof Symbol=="function"&&w.constructor===Symbol&&w!==Symbol.prototype?"symbol":typeof w},T(j)}var Ke=function(){var O=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{},w=O.action,k=w===void 0?"copy":w,F=O.container,q=O.target,Le=O.text;if(k!=="copy"&&k!=="cut")throw new Error('Invalid "action" value, use either "copy" or "cut"');if(q!==void 0)if(q&&T(q)==="object"&&q.nodeType===1){if(k==="copy"&&q.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. 
Please use "readonly" instead of "disabled" attribute');if(k==="cut"&&(q.hasAttribute("readonly")||q.hasAttribute("disabled")))throw new Error(`Invalid "target" attribute. You can't cut text from elements with "readonly" or "disabled" attributes`)}else throw new Error('Invalid "target" value, use a valid Element');if(Le)return z(Le,{container:F});if(q)return k==="cut"?h(q):z(q,{container:F})},We=Ke;function Ie(j){return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?Ie=function(w){return typeof w}:Ie=function(w){return w&&typeof Symbol=="function"&&w.constructor===Symbol&&w!==Symbol.prototype?"symbol":typeof w},Ie(j)}function Ti(j,O){if(!(j instanceof O))throw new TypeError("Cannot call a class as a function")}function nn(j,O){for(var w=0;w0&&arguments[0]!==void 0?arguments[0]:{};this.action=typeof F.action=="function"?F.action:this.defaultAction,this.target=typeof F.target=="function"?F.target:this.defaultTarget,this.text=typeof F.text=="function"?F.text:this.defaultText,this.container=Ie(F.container)==="object"?F.container:document.body}},{key:"listenClick",value:function(F){var q=this;this.listener=c()(F,"click",function(Le){return q.onClick(Le)})}},{key:"onClick",value:function(F){var q=F.delegateTarget||F.currentTarget,Le=this.action(q)||"copy",Rt=We({action:Le,container:this.container,target:this.target(q),text:this.text(q)});this.emit(Rt?"success":"error",{action:Le,text:Rt,trigger:q,clearSelection:function(){q&&q.focus(),window.getSelection().removeAllRanges()}})}},{key:"defaultAction",value:function(F){return yr("action",F)}},{key:"defaultTarget",value:function(F){var q=yr("target",F);if(q)return document.querySelector(q)}},{key:"defaultText",value:function(F){return yr("text",F)}},{key:"destroy",value:function(){this.listener.destroy()}}],[{key:"copy",value:function(F){var q=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body};return z(F,q)}},{key:"cut",value:function(F){return 
h(F)}},{key:"isSupported",value:function(){var F=arguments.length>0&&arguments[0]!==void 0?arguments[0]:["copy","cut"],q=typeof F=="string"?[F]:F,Le=!!document.queryCommandSupported;return q.forEach(function(Rt){Le=Le&&!!document.queryCommandSupported(Rt)}),Le}}]),w}(s()),ki=Ri},828:function(n){var o=9;if(typeof Element!="undefined"&&!Element.prototype.matches){var i=Element.prototype;i.matches=i.matchesSelector||i.mozMatchesSelector||i.msMatchesSelector||i.oMatchesSelector||i.webkitMatchesSelector}function a(s,f){for(;s&&s.nodeType!==o;){if(typeof s.matches=="function"&&s.matches(f))return s;s=s.parentNode}}n.exports=a},438:function(n,o,i){var a=i(828);function s(u,p,m,d,h){var v=c.apply(this,arguments);return u.addEventListener(m,v,h),{destroy:function(){u.removeEventListener(m,v,h)}}}function f(u,p,m,d,h){return typeof u.addEventListener=="function"?s.apply(null,arguments):typeof m=="function"?s.bind(null,document).apply(null,arguments):(typeof u=="string"&&(u=document.querySelectorAll(u)),Array.prototype.map.call(u,function(v){return s(v,p,m,d,h)}))}function c(u,p,m,d){return function(h){h.delegateTarget=a(h.target,p),h.delegateTarget&&d.call(u,h)}}n.exports=f},879:function(n,o){o.node=function(i){return i!==void 0&&i instanceof HTMLElement&&i.nodeType===1},o.nodeList=function(i){var a=Object.prototype.toString.call(i);return i!==void 0&&(a==="[object NodeList]"||a==="[object HTMLCollection]")&&"length"in i&&(i.length===0||o.node(i[0]))},o.string=function(i){return typeof i=="string"||i instanceof String},o.fn=function(i){var a=Object.prototype.toString.call(i);return a==="[object Function]"}},370:function(n,o,i){var a=i(879),s=i(438);function f(m,d,h){if(!m&&!d&&!h)throw new Error("Missing required arguments");if(!a.string(d))throw new TypeError("Second argument must be a String");if(!a.fn(h))throw new TypeError("Third argument must be a Function");if(a.node(m))return c(m,d,h);if(a.nodeList(m))return u(m,d,h);if(a.string(m))return p(m,d,h);throw new 
TypeError("First argument must be a String, HTMLElement, HTMLCollection, or NodeList")}function c(m,d,h){return m.addEventListener(d,h),{destroy:function(){m.removeEventListener(d,h)}}}function u(m,d,h){return Array.prototype.forEach.call(m,function(v){v.addEventListener(d,h)}),{destroy:function(){Array.prototype.forEach.call(m,function(v){v.removeEventListener(d,h)})}}}function p(m,d,h){return s(document.body,m,d,h)}n.exports=f},817:function(n){function o(i){var a;if(i.nodeName==="SELECT")i.focus(),a=i.value;else if(i.nodeName==="INPUT"||i.nodeName==="TEXTAREA"){var s=i.hasAttribute("readonly");s||i.setAttribute("readonly",""),i.select(),i.setSelectionRange(0,i.value.length),s||i.removeAttribute("readonly"),a=i.value}else{i.hasAttribute("contenteditable")&&i.focus();var f=window.getSelection(),c=document.createRange();c.selectNodeContents(i),f.removeAllRanges(),f.addRange(c),a=f.toString()}return a}n.exports=o},279:function(n){function o(){}o.prototype={on:function(i,a,s){var f=this.e||(this.e={});return(f[i]||(f[i]=[])).push({fn:a,ctx:s}),this},once:function(i,a,s){var f=this;function c(){f.off(i,c),a.apply(s,arguments)}return c._=a,this.on(i,c,s)},emit:function(i){var a=[].slice.call(arguments,1),s=((this.e||(this.e={}))[i]||[]).slice(),f=0,c=s.length;for(f;f{"use strict";/*! 
+ * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */var is=/["'&<>]/;Jo.exports=as;function as(e){var t=""+e,r=is.exec(t);if(!r)return t;var n,o="",i=0,a=0;for(i=r.index;i0&&i[i.length-1])&&(c[0]===6||c[0]===2)){r=0;continue}if(c[0]===3&&(!i||c[1]>i[0]&&c[1]=e.length&&(e=void 0),{value:e&&e[n++],done:!e}}};throw new TypeError(t?"Object is not iterable.":"Symbol.iterator is not defined.")}function W(e,t){var r=typeof Symbol=="function"&&e[Symbol.iterator];if(!r)return e;var n=r.call(e),o,i=[],a;try{for(;(t===void 0||t-- >0)&&!(o=n.next()).done;)i.push(o.value)}catch(s){a={error:s}}finally{try{o&&!o.done&&(r=n.return)&&r.call(n)}finally{if(a)throw a.error}}return i}function D(e,t,r){if(r||arguments.length===2)for(var n=0,o=t.length,i;n1||s(m,d)})})}function s(m,d){try{f(n[m](d))}catch(h){p(i[0][3],h)}}function f(m){m.value instanceof Xe?Promise.resolve(m.value.v).then(c,u):p(i[0][2],m)}function c(m){s("next",m)}function u(m){s("throw",m)}function p(m,d){m(d),i.shift(),i.length&&s(i[0][0],i[0][1])}}function mn(e){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var t=e[Symbol.asyncIterator],r;return t?t.call(e):(e=typeof xe=="function"?xe(e):e[Symbol.iterator](),r={},n("next"),n("throw"),n("return"),r[Symbol.asyncIterator]=function(){return this},r);function n(i){r[i]=e[i]&&function(a){return new Promise(function(s,f){a=e[i](a),o(s,f,a.done,a.value)})}}function o(i,a,s,f){Promise.resolve(f).then(function(c){i({value:c,done:s})},a)}}function A(e){return typeof e=="function"}function at(e){var t=function(n){Error.call(n),n.stack=new Error().stack},r=e(t);return r.prototype=Object.create(Error.prototype),r.prototype.constructor=r,r}var $t=at(function(e){return function(r){e(this),this.message=r?r.length+` errors occurred during unsubscription: +`+r.map(function(n,o){return o+1+") "+n.toString()}).join(` + 
`):"",this.name="UnsubscriptionError",this.errors=r}});function De(e,t){if(e){var r=e.indexOf(t);0<=r&&e.splice(r,1)}}var Fe=function(){function e(t){this.initialTeardown=t,this.closed=!1,this._parentage=null,this._finalizers=null}return e.prototype.unsubscribe=function(){var t,r,n,o,i;if(!this.closed){this.closed=!0;var a=this._parentage;if(a)if(this._parentage=null,Array.isArray(a))try{for(var s=xe(a),f=s.next();!f.done;f=s.next()){var c=f.value;c.remove(this)}}catch(v){t={error:v}}finally{try{f&&!f.done&&(r=s.return)&&r.call(s)}finally{if(t)throw t.error}}else a.remove(this);var u=this.initialTeardown;if(A(u))try{u()}catch(v){i=v instanceof $t?v.errors:[v]}var p=this._finalizers;if(p){this._finalizers=null;try{for(var m=xe(p),d=m.next();!d.done;d=m.next()){var h=d.value;try{dn(h)}catch(v){i=i!=null?i:[],v instanceof $t?i=D(D([],W(i)),W(v.errors)):i.push(v)}}}catch(v){n={error:v}}finally{try{d&&!d.done&&(o=m.return)&&o.call(m)}finally{if(n)throw n.error}}}if(i)throw new $t(i)}},e.prototype.add=function(t){var r;if(t&&t!==this)if(this.closed)dn(t);else{if(t instanceof e){if(t.closed||t._hasParent(this))return;t._addParent(this)}(this._finalizers=(r=this._finalizers)!==null&&r!==void 0?r:[]).push(t)}},e.prototype._hasParent=function(t){var r=this._parentage;return r===t||Array.isArray(r)&&r.includes(t)},e.prototype._addParent=function(t){var r=this._parentage;this._parentage=Array.isArray(r)?(r.push(t),r):r?[r,t]:t},e.prototype._removeParent=function(t){var r=this._parentage;r===t?this._parentage=null:Array.isArray(r)&&De(r,t)},e.prototype.remove=function(t){var r=this._finalizers;r&&De(r,t),t instanceof e&&t._removeParent(this)},e.EMPTY=function(){var t=new e;return t.closed=!0,t}(),e}();var Or=Fe.EMPTY;function It(e){return e instanceof Fe||e&&"closed"in e&&A(e.remove)&&A(e.add)&&A(e.unsubscribe)}function dn(e){A(e)?e():e.unsubscribe()}var Ae={onUnhandledError:null,onStoppedNotification:null,Promise:void 
0,useDeprecatedSynchronousErrorHandling:!1,useDeprecatedNextContext:!1};var st={setTimeout:function(e,t){for(var r=[],n=2;n0},enumerable:!1,configurable:!0}),t.prototype._trySubscribe=function(r){return this._throwIfClosed(),e.prototype._trySubscribe.call(this,r)},t.prototype._subscribe=function(r){return this._throwIfClosed(),this._checkFinalizedStatuses(r),this._innerSubscribe(r)},t.prototype._innerSubscribe=function(r){var n=this,o=this,i=o.hasError,a=o.isStopped,s=o.observers;return i||a?Or:(this.currentObservers=null,s.push(r),new Fe(function(){n.currentObservers=null,De(s,r)}))},t.prototype._checkFinalizedStatuses=function(r){var n=this,o=n.hasError,i=n.thrownError,a=n.isStopped;o?r.error(i):a&&r.complete()},t.prototype.asObservable=function(){var r=new U;return r.source=this,r},t.create=function(r,n){return new wn(r,n)},t}(U);var wn=function(e){ne(t,e);function t(r,n){var o=e.call(this)||this;return o.destination=r,o.source=n,o}return t.prototype.next=function(r){var n,o;(o=(n=this.destination)===null||n===void 0?void 0:n.next)===null||o===void 0||o.call(n,r)},t.prototype.error=function(r){var n,o;(o=(n=this.destination)===null||n===void 0?void 0:n.error)===null||o===void 0||o.call(n,r)},t.prototype.complete=function(){var r,n;(n=(r=this.destination)===null||r===void 0?void 0:r.complete)===null||n===void 0||n.call(r)},t.prototype._subscribe=function(r){var n,o;return(o=(n=this.source)===null||n===void 0?void 0:n.subscribe(r))!==null&&o!==void 0?o:Or},t}(E);var Et={now:function(){return(Et.delegate||Date).now()},delegate:void 0};var wt=function(e){ne(t,e);function t(r,n,o){r===void 0&&(r=1/0),n===void 0&&(n=1/0),o===void 0&&(o=Et);var i=e.call(this)||this;return i._bufferSize=r,i._windowTime=n,i._timestampProvider=o,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=n===1/0,i._bufferSize=Math.max(1,r),i._windowTime=Math.max(1,n),i}return t.prototype.next=function(r){var 
n=this,o=n.isStopped,i=n._buffer,a=n._infiniteTimeWindow,s=n._timestampProvider,f=n._windowTime;o||(i.push(r),!a&&i.push(s.now()+f)),this._trimBuffer(),e.prototype.next.call(this,r)},t.prototype._subscribe=function(r){this._throwIfClosed(),this._trimBuffer();for(var n=this._innerSubscribe(r),o=this,i=o._infiniteTimeWindow,a=o._buffer,s=a.slice(),f=0;f0?e.prototype.requestAsyncId.call(this,r,n,o):(r.actions.push(this),r._scheduled||(r._scheduled=ut.requestAnimationFrame(function(){return r.flush(void 0)})))},t.prototype.recycleAsyncId=function(r,n,o){var i;if(o===void 0&&(o=0),o!=null?o>0:this.delay>0)return e.prototype.recycleAsyncId.call(this,r,n,o);var a=r.actions;n!=null&&((i=a[a.length-1])===null||i===void 0?void 0:i.id)!==n&&(ut.cancelAnimationFrame(n),r._scheduled=void 0)},t}(Ut);var On=function(e){ne(t,e);function t(){return e!==null&&e.apply(this,arguments)||this}return t.prototype.flush=function(r){this._active=!0;var n=this._scheduled;this._scheduled=void 0;var o=this.actions,i;r=r||o.shift();do if(i=r.execute(r.state,r.delay))break;while((r=o[0])&&r.id===n&&o.shift());if(this._active=!1,i){for(;(r=o[0])&&r.id===n&&o.shift();)r.unsubscribe();throw i}},t}(Wt);var we=new On(Tn);var R=new U(function(e){return e.complete()});function Dt(e){return e&&A(e.schedule)}function kr(e){return e[e.length-1]}function Qe(e){return A(kr(e))?e.pop():void 0}function Se(e){return Dt(kr(e))?e.pop():void 0}function Vt(e,t){return typeof kr(e)=="number"?e.pop():t}var pt=function(e){return e&&typeof e.length=="number"&&typeof e!="function"};function zt(e){return A(e==null?void 0:e.then)}function Nt(e){return A(e[ft])}function qt(e){return Symbol.asyncIterator&&A(e==null?void 0:e[Symbol.asyncIterator])}function Kt(e){return new TypeError("You provided "+(e!==null&&typeof e=="object"?"an invalid object":"'"+e+"'")+" where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.")}function Ki(){return typeof Symbol!="function"||!Symbol.iterator?"@@iterator":Symbol.iterator}var Qt=Ki();function Yt(e){return A(e==null?void 0:e[Qt])}function Gt(e){return ln(this,arguments,function(){var r,n,o,i;return Pt(this,function(a){switch(a.label){case 0:r=e.getReader(),a.label=1;case 1:a.trys.push([1,,9,10]),a.label=2;case 2:return[4,Xe(r.read())];case 3:return n=a.sent(),o=n.value,i=n.done,i?[4,Xe(void 0)]:[3,5];case 4:return[2,a.sent()];case 5:return[4,Xe(o)];case 6:return[4,a.sent()];case 7:return a.sent(),[3,2];case 8:return[3,10];case 9:return r.releaseLock(),[7];case 10:return[2]}})})}function Bt(e){return A(e==null?void 0:e.getReader)}function $(e){if(e instanceof U)return e;if(e!=null){if(Nt(e))return Qi(e);if(pt(e))return Yi(e);if(zt(e))return Gi(e);if(qt(e))return _n(e);if(Yt(e))return Bi(e);if(Bt(e))return Ji(e)}throw Kt(e)}function Qi(e){return new U(function(t){var r=e[ft]();if(A(r.subscribe))return r.subscribe(t);throw new TypeError("Provided object does not correctly implement Symbol.observable")})}function Yi(e){return new U(function(t){for(var r=0;r=2;return function(n){return n.pipe(e?_(function(o,i){return e(o,i,n)}):me,Oe(1),r?He(t):zn(function(){return new Xt}))}}function Nn(){for(var e=[],t=0;t=2,!0))}function fe(e){e===void 0&&(e={});var t=e.connector,r=t===void 0?function(){return new E}:t,n=e.resetOnError,o=n===void 0?!0:n,i=e.resetOnComplete,a=i===void 0?!0:i,s=e.resetOnRefCountZero,f=s===void 0?!0:s;return function(c){var u,p,m,d=0,h=!1,v=!1,B=function(){p==null||p.unsubscribe(),p=void 0},re=function(){B(),u=m=void 0,h=v=!1},z=function(){var T=u;re(),T==null||T.unsubscribe()};return g(function(T,Ke){d++,!v&&!h&&B();var We=m=m!=null?m:r();Ke.add(function(){d--,d===0&&!v&&!h&&(p=jr(z,f))}),We.subscribe(Ke),!u&&d>0&&(u=new et({next:function(Ie){return 
We.next(Ie)},error:function(Ie){v=!0,B(),p=jr(re,o,Ie),We.error(Ie)},complete:function(){h=!0,B(),p=jr(re,a),We.complete()}}),$(T).subscribe(u))})(c)}}function jr(e,t){for(var r=[],n=2;ne.next(document)),e}function K(e,t=document){return Array.from(t.querySelectorAll(e))}function V(e,t=document){let r=se(e,t);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${e}" to be present`);return r}function se(e,t=document){return t.querySelector(e)||void 0}function _e(){return document.activeElement instanceof HTMLElement&&document.activeElement||void 0}function tr(e){return L(b(document.body,"focusin"),b(document.body,"focusout")).pipe(ke(1),l(()=>{let t=_e();return typeof t!="undefined"?e.contains(t):!1}),N(e===_e()),Y())}function Be(e){return{x:e.offsetLeft,y:e.offsetTop}}function Yn(e){return L(b(window,"load"),b(window,"resize")).pipe(Ce(0,we),l(()=>Be(e)),N(Be(e)))}function rr(e){return{x:e.scrollLeft,y:e.scrollTop}}function dt(e){return L(b(e,"scroll"),b(window,"resize")).pipe(Ce(0,we),l(()=>rr(e)),N(rr(e)))}var Bn=function(){if(typeof Map!="undefined")return Map;function e(t,r){var n=-1;return t.some(function(o,i){return o[0]===r?(n=i,!0):!1}),n}return function(){function t(){this.__entries__=[]}return Object.defineProperty(t.prototype,"size",{get:function(){return this.__entries__.length},enumerable:!0,configurable:!0}),t.prototype.get=function(r){var n=e(this.__entries__,r),o=this.__entries__[n];return o&&o[1]},t.prototype.set=function(r,n){var o=e(this.__entries__,r);~o?this.__entries__[o][1]=n:this.__entries__.push([r,n])},t.prototype.delete=function(r){var n=this.__entries__,o=e(n,r);~o&&n.splice(o,1)},t.prototype.has=function(r){return!!~e(this.__entries__,r)},t.prototype.clear=function(){this.__entries__.splice(0)},t.prototype.forEach=function(r,n){n===void 0&&(n=null);for(var 
o=0,i=this.__entries__;o0},e.prototype.connect_=function(){!zr||this.connected_||(document.addEventListener("transitionend",this.onTransitionEnd_),window.addEventListener("resize",this.refresh),xa?(this.mutationsObserver_=new MutationObserver(this.refresh),this.mutationsObserver_.observe(document,{attributes:!0,childList:!0,characterData:!0,subtree:!0})):(document.addEventListener("DOMSubtreeModified",this.refresh),this.mutationEventsAdded_=!0),this.connected_=!0)},e.prototype.disconnect_=function(){!zr||!this.connected_||(document.removeEventListener("transitionend",this.onTransitionEnd_),window.removeEventListener("resize",this.refresh),this.mutationsObserver_&&this.mutationsObserver_.disconnect(),this.mutationEventsAdded_&&document.removeEventListener("DOMSubtreeModified",this.refresh),this.mutationsObserver_=null,this.mutationEventsAdded_=!1,this.connected_=!1)},e.prototype.onTransitionEnd_=function(t){var r=t.propertyName,n=r===void 0?"":r,o=ya.some(function(i){return!!~n.indexOf(i)});o&&this.refresh()},e.getInstance=function(){return this.instance_||(this.instance_=new e),this.instance_},e.instance_=null,e}(),Jn=function(e,t){for(var r=0,n=Object.keys(t);r0},e}(),Zn=typeof WeakMap!="undefined"?new WeakMap:new Bn,eo=function(){function e(t){if(!(this instanceof e))throw new TypeError("Cannot call a class as a function.");if(!arguments.length)throw new TypeError("1 argument required, but only 0 present.");var r=Ea.getInstance(),n=new Ra(t,r,this);Zn.set(this,n)}return e}();["observe","unobserve","disconnect"].forEach(function(e){eo.prototype[e]=function(){var t;return(t=Zn.get(this))[e].apply(t,arguments)}});var ka=function(){return typeof nr.ResizeObserver!="undefined"?nr.ResizeObserver:eo}(),to=ka;var ro=new E,Ha=I(()=>H(new to(e=>{for(let t of e)ro.next(t)}))).pipe(x(e=>L(Te,H(e)).pipe(C(()=>e.disconnect()))),J(1));function de(e){return{width:e.offsetWidth,height:e.offsetHeight}}function ge(e){return 
Ha.pipe(S(t=>t.observe(e)),x(t=>ro.pipe(_(({target:r})=>r===e),C(()=>t.unobserve(e)),l(()=>de(e)))),N(de(e)))}function bt(e){return{width:e.scrollWidth,height:e.scrollHeight}}function ar(e){let t=e.parentElement;for(;t&&(e.scrollWidth<=t.scrollWidth&&e.scrollHeight<=t.scrollHeight);)t=(e=t).parentElement;return t?e:void 0}var no=new E,Pa=I(()=>H(new IntersectionObserver(e=>{for(let t of e)no.next(t)},{threshold:0}))).pipe(x(e=>L(Te,H(e)).pipe(C(()=>e.disconnect()))),J(1));function sr(e){return Pa.pipe(S(t=>t.observe(e)),x(t=>no.pipe(_(({target:r})=>r===e),C(()=>t.unobserve(e)),l(({isIntersecting:r})=>r))))}function oo(e,t=16){return dt(e).pipe(l(({y:r})=>{let n=de(e),o=bt(e);return r>=o.height-n.height-t}),Y())}var cr={drawer:V("[data-md-toggle=drawer]"),search:V("[data-md-toggle=search]")};function io(e){return cr[e].checked}function qe(e,t){cr[e].checked!==t&&cr[e].click()}function je(e){let t=cr[e];return b(t,"change").pipe(l(()=>t.checked),N(t.checked))}function $a(e,t){switch(e.constructor){case HTMLInputElement:return e.type==="radio"?/^Arrow/.test(t):!0;case HTMLSelectElement:case HTMLTextAreaElement:return!0;default:return e.isContentEditable}}function Ia(){return L(b(window,"compositionstart").pipe(l(()=>!0)),b(window,"compositionend").pipe(l(()=>!1))).pipe(N(!1))}function ao(){let e=b(window,"keydown").pipe(_(t=>!(t.metaKey||t.ctrlKey)),l(t=>({mode:io("search")?"search":"global",type:t.key,claim(){t.preventDefault(),t.stopPropagation()}})),_(({mode:t,type:r})=>{if(t==="global"){let n=_e();if(typeof n!="undefined")return!$a(n,r)}return!0}),fe());return Ia().pipe(x(t=>t?R:e))}function Me(){return new URL(location.href)}function ot(e){location.href=e.href}function so(){return new E}function co(e,t){if(typeof t=="string"||typeof t=="number")e.innerHTML+=t.toString();else if(t instanceof Node)e.appendChild(t);else if(Array.isArray(t))for(let r of t)co(e,r)}function M(e,t,...r){let n=document.createElement(e);if(t)for(let o of Object.keys(t))typeof 
t[o]!="undefined"&&(typeof t[o]!="boolean"?n.setAttribute(o,t[o]):n.setAttribute(o,""));for(let o of r)co(n,o);return n}function fr(e){if(e>999){let t=+((e-950)%1e3>99);return`${((e+1e-6)/1e3).toFixed(t)}k`}else return e.toString()}function fo(){return location.hash.substring(1)}function uo(e){let t=M("a",{href:e});t.addEventListener("click",r=>r.stopPropagation()),t.click()}function Fa(){return b(window,"hashchange").pipe(l(fo),N(fo()),_(e=>e.length>0),J(1))}function po(){return Fa().pipe(l(e=>se(`[id="${e}"]`)),_(e=>typeof e!="undefined"))}function Nr(e){let t=matchMedia(e);return Zt(r=>t.addListener(()=>r(t.matches))).pipe(N(t.matches))}function lo(){let e=matchMedia("print");return L(b(window,"beforeprint").pipe(l(()=>!0)),b(window,"afterprint").pipe(l(()=>!1))).pipe(N(e.matches))}function qr(e,t){return e.pipe(x(r=>r?t():R))}function ur(e,t={credentials:"same-origin"}){return ve(fetch(`${e}`,t)).pipe(ce(()=>R),x(r=>r.status!==200?Tt(()=>new Error(r.statusText)):H(r)))}function Ue(e,t){return ur(e,t).pipe(x(r=>r.json()),J(1))}function mo(e,t){let r=new DOMParser;return ur(e,t).pipe(x(n=>n.text()),l(n=>r.parseFromString(n,"text/xml")),J(1))}function pr(e){let t=M("script",{src:e});return I(()=>(document.head.appendChild(t),L(b(t,"load"),b(t,"error").pipe(x(()=>Tt(()=>new ReferenceError(`Invalid script: ${e}`))))).pipe(l(()=>{}),C(()=>document.head.removeChild(t)),Oe(1))))}function ho(){return{x:Math.max(0,scrollX),y:Math.max(0,scrollY)}}function bo(){return L(b(window,"scroll",{passive:!0}),b(window,"resize",{passive:!0})).pipe(l(ho),N(ho()))}function vo(){return{width:innerWidth,height:innerHeight}}function go(){return b(window,"resize",{passive:!0}).pipe(l(vo),N(vo()))}function yo(){return Q([bo(),go()]).pipe(l(([e,t])=>({offset:e,size:t})),J(1))}function lr(e,{viewport$:t,header$:r}){let n=t.pipe(X("size")),o=Q([n,r]).pipe(l(()=>Be(e)));return Q([r,t,o]).pipe(l(([{height:i},{offset:a,size:s},{x:f,y:c}])=>({offset:{x:a.x-f,y:a.y-c+i},size:s})))}(()=>{function 
e(n,o){parent.postMessage(n,o||"*")}function t(...n){return n.reduce((o,i)=>o.then(()=>new Promise(a=>{let s=document.createElement("script");s.src=i,s.onload=a,document.body.appendChild(s)})),Promise.resolve())}var r=class{constructor(n){this.url=n,this.onerror=null,this.onmessage=null,this.onmessageerror=null,this.m=a=>{a.source===this.w&&(a.stopImmediatePropagation(),this.dispatchEvent(new MessageEvent("message",{data:a.data})),this.onmessage&&this.onmessage(a))},this.e=(a,s,f,c,u)=>{if(s===this.url.toString()){let p=new ErrorEvent("error",{message:a,filename:s,lineno:f,colno:c,error:u});this.dispatchEvent(p),this.onerror&&this.onerror(p)}};let o=new EventTarget;this.addEventListener=o.addEventListener.bind(o),this.removeEventListener=o.removeEventListener.bind(o),this.dispatchEvent=o.dispatchEvent.bind(o);let i=document.createElement("iframe");i.width=i.height=i.frameBorder="0",document.body.appendChild(this.iframe=i),this.w.document.open(),this.w.document.write(` + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

CMake Build

+

Introduction

+

DSM2 consists of many projects and third party libraries. This has meant +running a number of scripts as documented in CMake Build. +It has also entailed opening multiple solution files and building libraries +by hand. Furthermore, the compiler and Visual Studio upgrades were slow +to adopt due to the cost of breaking changes when upgrading versions. +And finally, and most importantly, there is the daunting task of building all +these files for a different OS such as Linux.

+

CMake

+

cmake is a system that +generates the build system. In other words, cmake does not itself build +the libraries and projects but encompasses the high-level, OS- +and build-system-independent instructions for generating those systems. +Its introduction and tutorial can be found at +https://cmake.org/

+

A first effort at a cmake generated build system is working for VS2015 +with the latest intel compiler on Windows. The instructions for this and +the files needed have been checked into github master

+
    +
  • A CmakeLibraryMacro.txt is placed at DSM2 root path, with global + macro and environment settings
  • +
  • A CmakeLists is placed under each project/sub-subject to govern its + compilation.
  • +
  • build*.bat is created for DSM2, input_storage, oprule, + respectively, to contain the key cmake commands (listed in the + following sections).
  • +
  • After compilation, the exe/dll are generated under subfolders + BUILD\release or BUILD\debug.
  • +
+

DSM2 core project

+
CMake Instructions
+Create a build directory BUILD under dsm2
+
+
+mkdir BUILD
+cd BUILD
+
+First setup path 
+"C:\Program Files (x86)\IntelSWTools\compilers_and_libraries\windows\bin\compilevars.bat" ia32 vs2015
+
+Next execute for VS2015 the cmake command
+cmake -G "Visual Studio 14 2015" ..\src
+
+Finally open the DSM2.sln file in VS 2015 and compile
+
+or compile from command line with this command
+cmake --build . --target ALL_BUILD --config Debug
+cmake --build . --target ALL_BUILD --config Release
+
+

Input Storage and Oprule

+

The libraries input_storage and oprule are built to support the DSM2 core +project. (Confirm they build successfully before compiling the core project.)

+
cd input_storage
+mkdir BUILD
+cd BUILD
+cmake -G "Visual Studio 14 2015" ..\
+cmake --build . --target ALL_BUILD --config Debug
+cmake --build . --target ALL_BUILD --config Release
+
+
cd oprule
+mkdir BUILD
+cd BUILD
+cmake -G "Visual Studio 14 2015" ..\
+cmake --build . --target ALL_BUILD --config Debug
+cmake --build . --target ALL_BUILD --config Release
+
+

Third Party

+

DSM2 relies on third parties and requires the libraries +below. Usually DSM2 just uses the pre-built libraries; however, sometimes +(when the version/environment changes), the following libraries need to be +re-built. Note DSM2 only requires some specific subsets of these +libraries, and these specifications can be found in +CmakeLibraryMacro.txt

+

+

boost

+

Run bootstrap.bat to build b2.exe; then run b2.exe

+

Note b2.exe is required to run in complete mode (the default minimal +mode won't build the required library; run b2 --help for details)

+

Linux sample for boost build

+
#use this script to get started with new boost library
+#cp this script into new boost library source directory
+./bootstrap.sh --with-toolset=intel-linux
+./b2 --clean
+./b2 -a toolset=intel-linux link=static variant=release --with-filesystem --with-regex --with-system --with-test
+
+

Windows sample for boost build

+
./b2 -a runtime-link=static --with-filesystem --with-regex --with-system --with-test
+
+

HDF5

+

Go to CMake-hdf5-1.8.20 folder to run batch of the relevant version, +e.g. build-VS2015-32.bat, which builds HDF5-1.8.20-win32.zip

+

Unzip it and place it under third_party folder.

+

To build the static libraries, which is what is needed for the DSM2 static +build, see HDF5 CMake Static Build

+

heclib

+

Build heclib\windows_sources\windows_build_MT_default_settings.bat

+
    +
  • MT is for static version as we needed (MD for dynamic)
  • +
  • Make sure setting compiler path as the required version (as + exemplified in core project)
  • +
+

For using Visual Studio 2017

+

Install Visual Studio 2017:

+

In addition to the standard Visual Studio 2017 installation, download +individual components from the VS2017 installer.  The individual +components needed are:

+

+

Run the installer, and click on Modify, then Individual components tab, +and check the components, finally click on Modify.

+

Change the lines in build_dsm2_vs2015_32b.bat:

+

call "C:\Program Files +(x86)\IntelSWTools\compilers_and_libraries\windows\bin\compilevars.bat" +ia32 vs2015

+

to

+

call "C:\Program Files +(x86)\IntelSWTools\compilers_and_libraries_2019\windows\bin\compilervars.bat" +ia32 vs2017

+

cmake -G "Visual Studio 14 2015" ..\src

+

to 

+

cmake -G "Visual Studio 15 2017" ..\src

+

Rerun the build scripts as instructed above.

+

Attachments:

+

+VS2017_individaul_components.jpg +(image/jpeg)
+ +sum.PNG (image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/build/Cloud_Notes/index.html b/build/Cloud_Notes/index.html new file mode 100644 index 00000000..ee3ab7f7 --- /dev/null +++ b/build/Cloud_Notes/index.html @@ -0,0 +1,508 @@ + + + + + + + + + + + + + + + + + + Cloud Notes - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Cloud Notes

+

This page documents important notes for running DSM2 in the cloud.

+

DSM2 has been compiled on Ubuntu Linux and Windows. Only the 32 bit version +has been compiled; the path to 64 bit conversion is much longer due to +C/C++ code that needs cleaning up.

+

Linux

+

DSM2 has been compiled and tested on Linux VMs running Red Hat 4.8 with +kernel version 3.10. Static linking does not seem to work as documented +and the dependencies on Intel fortran libraries are packaged in a lib/ +subfolder.

+

AWS Linux version is Red Hat 7.3 running kernel 4.14.  The following +libraries are needed on top of the base image from AWS

+
#sudo yum upgrade #-- Do this to ensure installs of the below go through
+sudo yum install glibc.i686
+sudo yum install libgcc.i686
+sudo yum install libstdc++.i686
+
+

Windows

+

Windows version is statically compiled and so the executables should +work without any other dependencies. 

+

TODO: JRE version and installation 

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/build/DSM2_v82_Compiling_and_Packaging/index.html b/build/DSM2_v82_Compiling_and_Packaging/index.html new file mode 100644 index 00000000..c176222a --- /dev/null +++ b/build/DSM2_v82_Compiling_and_Packaging/index.html @@ -0,0 +1,764 @@ + + + + + + + + + + + + + + + + + + DSM2 v82 Compiling and Packaging - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 v82 Compiling and Packaging

+

This memo presents step-by-step instructions on compiling and packaging +DSM2 version 8 assuming a minor (bug-fix) release. 

+

Version

+

The latest DSM2 release version is 8.2.0. The first two digits denote the DSM2 +official version; the 3rd digit is used for minor (bug-fix) +releases.

+

The git commit number is also embedded for the developer's record, and can be +checked via the '-version' command.

+

+

The Git number is embedded automatically at compilation time. The +following 3 files contains relevant version info:
+ \dsm2\src\common\version_generate.py
+ \dsm2_distribute\dsm2\install\copy_hydro_qual_ptmDLL_notepad_style.bat
+ \dsm2_distribute\dsm2\install\DSM2setup_Script_Generate.py

+

*BDO is using an 'unstable trunk' branching strategy in GIT for DSM2 +development.
+*GIT categories: master is for major development and bug fix; branches +are usually for separate and time-consuming studies on old versions; +Tags are the records for all release versions.
+*DSM2 version tag (the 3rd digit in the version number) is +designed to use odd number for internal use, and even number for formal +release 

+

Checkout

+

Checkout DSM2 from github: +https://github.com/CADWRDeltaModeling/dsm2.git

+

+

place the project in local directory, e.g. D:\dev\dsm2_master\

+

+

Checkout can be conducted at the command line

+
git clone https://github.com/CADWRDeltaModeling/dsm2.git
+
+

Or use software sourceTree (recommended for later use). Note which +branch you're checking out (master by default).

+

+

Compile

+

Cmake is currently used to control and adapt project compilation (see +details at CMake Build).

+

From command line, run cmake batch at the project root path. It calls +cmake build of oprule, input_storage, then DSM2 sequentially.

+
build_all.bat
+
+

The building results are exe of hydro and qual, with dll of ptm, under +path \BUILD\Release or Debug

+

+

The major part of the PTM module is written in the Java programming language and +placed in the source code folder: \dsm2_v8_1\src\ptm. It can be +built in two ways:

+
    +
  • ANT: the default compiling procedure is set in build.xml; open the + command window and run 'ant' (build.xml as default running file) at + current path. The release version of compiled application file will + be automatically placed in the folder: \dsm2_v8_0\src\ptm\lib + (ptm.jar)
  • +
  • Eclipse: DSM2 v82 PTM Compiling with + Eclipse
  • +
+

Libraries

+

All the libraries DSM2 needs are precompiled and placed in the folder +lib: input storage, oprule, and third parties

+

If compiling is required, refer their compile details at CMake +Build.

+

+

Since the third_party folder is very big and not easy to copy around, one +way to share the same package (without increasing disk usage) is to use +Windows mklink

+

Use admin right to open a command window and type in:

+

mklink /D {target location}\third_party {original location}\third_party

+

For internal users use this command

+

mklink /D +third_party \cnrastore-bdo\Delta_Mod\Share\DSM2\compile_support\third_party

+

Debug

+

Cmake also generates the project's Visual Studio solution, which can be +used to debug the code.

+

See the following example for debug setting in Visual Studio 2015.

+

+

+

Make sure the 'debug info' is on the project you're working on (the +current VS set it as off by default).

+

+

Packaging

+

Download from share folder and change folder name to dsm2_distribute\

+

under branch (e.g. master), aside with dsm2

+

+

1. Generate tutorial PDF files:

+

a) Delete all PDF files in "dsm2_distribute\dsm2\tutorials\pdf"

+

b) Run "doc2pdf.vbs" in "\dsm2_distribute\dsm2\install\ to generate +PDFs from tutorial word documents.

+

2. Copy compiled binaries to distribution folder:

+

a) Check the DSM2 version and paths to the compiled binaries are +correct in the batch file "copy_hydro_qual_ptmDLL_notepad_style.bat" in +the folder \dsm2_distribute\dsm2\install\

+

Be aware of the version consistency for the 3 control files mentioned in +'DSM2 Versioning'; if inconsistent, correct them and re-compile from step 6-4 in the +previous section

+

b) Run the batch file "copy_hydro_qual_ptmDLL_notepad_style.bat".

+


+
+Manually copy hydro.exe, qual.exe, ptm.dll, ptm.jar to the folder
+ \dsm2_distribute\dsm2\bin

+

3. Generate packaging script:

+

a) Check the DSM2 version is correct in the Python script +"DSM2setup_Script_Generate.py" in the folder +\dsm2_distribute\dsm2\install\

+

Be aware of the version consistency for the 3 control files mentioned in +'DSM2 Versioning'; if inconsistent, correct them and re-compile from step 6-4 in the +previous section

+

b) Run this Python script to generate Inno Setup script +"DSM2setup_v8.iss".

+

4. Create DSM2 installation file:

+

Run "DSM2setup_v8.iss" with Inno Setup Compiler v5.2.3
+The installation file named "DSM2setup_8.X.Xrelease.XXXX.exe" will be +created in the same folder, \dsm2_distribute\dsm2\install\

+

+

5. Quick-test installer:

+

Test installation on a clean machine. Run historical hydro, qual_ec and +ptm on study templates.

+

6. Tag and version increment:

+

Create release tag for both "dsm2" source code on github and +"dsm2_distribute" folders on share-folder. 

+


+

+

For future usage, immediately increment DSM2 version number (3rd digit +to the next odd number) in the following three files:

+
    +
  • \dsm2\src\common\version_generate.py  
  • +
  • \dsm2_distribute\dsm2\install\copy_hydro_qual_ptmDLL_notepad_style.bat +  
  • +
  • \dsm2_distribute\dsm2\install\DSM2setup_Script_Generate.py
  • +
+

Attachments:

+

+debug_on.PNG (image/png)
+ +package1.PNG (image/png)
+ +debug1.png (image/png)
+ +debug.png (image/png)
+ +vers.PNG (image/png)
+ +lib.PNG (image/png)
+ +local1.PNG (image/png)
+ +github.PNG (image/png)
+ +srctree0.PNG (image/png)
+ +local0.PNG (image/png)
+ +image2017-6-13_15-47-59.png +(image/png)
+ +image2017-6-13_15-47-43.png +(image/png)
+ +image2017-6-13_15-47-35.png +(image/png)
+ +image2017-6-13_15-47-17.png +(image/png)
+ +image2017-6-13_15-46-58.png +(image/png)
+ +worddav07c1c902559a15d9cb8d941d966322cb.png +(image/png)
+ +worddavb0ec1d6cc7478dc4ec73bc27abb42880.png +(image/png)
+ +worddav0037c3b8067a8dd0d52094029690277b.png +(image/png)
+ +worddav490ef33751ab42acaa896e9bb7dc2dc7.png +(image/png)
+ +worddav73b529f2e3382f4bb77f505185a10945.png +(image/png)
+ +worddavc7e39738a4a8caa213b31d248d81f87b.png +(image/png)
+ +worddavb58034debe15b3d2514f722580c782ad.png +(image/png)
+ +worddav1c1324ef8177e0822bb62d9cdf8fdb05.png +(image/png)
+ +worddava9503121f9ac50fd0060de1b95c6decc.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/build/DSM2_v82_PTM_Compiling_with_Eclipse/index.html b/build/DSM2_v82_PTM_Compiling_with_Eclipse/index.html new file mode 100644 index 00000000..5c1ac98e --- /dev/null +++ b/build/DSM2_v82_PTM_Compiling_with_Eclipse/index.html @@ -0,0 +1,847 @@ + + + + + + + + + + + + + + + + + + DSM2 v82 PTM Compiling with Eclipse - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 v82 PTM Compiling with Eclipse

+

This memo presents step-by-step instructions on establishing an editable +project, compiling and packaging the DSM2 (version 8) PTM module in Eclipse. +Note this is the version for PTM source code editing purposes. The +version for straight compiling is in the DSM2 compiling memo: +DSM2_v8_0_Compiling_and_Packaging.

+

Required Software and Source Code Check out

+
    +
  1. Java Development Kit + http://java.sun.com/javase/downloads/index.jsp
  2. +
  3. Eclipse-jee-galileo-win32 + +http://www.eclipse.org/downloads/packages/release/galileo/sr2+
  4. +
  5. PTM Java source code are checked out in the DSM2 Compile & Package + document, with the whole DSM2 coding package:
  6. +
+

The specific folder directory on DWR SVN server is:
++http://dminfo.water.ca.gov:8686/svn/repository/models/branches/dsm2_v8_0/src/ptm/DWR/DMS/PTM+
+and could be put at following directory on local computers:
++D:\delta\models\dsm2_v8_0\src\ptm+

+

Build up project
+Method 1: with Eclipse project description file
+File -> Import -> Existing Projects into Workspace -> Next -> Select +root directory (Browse) to where .project resides -> Ok -> Finish
+
+

+

Method 2: w/o Eclipse project description file

+
    +
  1. Set up workspace
  2. +
+

Double click to open Eclipse and set up the workspace (default path as +d:\workspace);
+*This is the path for source code, libraries, compiled bytecode files, +and et cetera. Source codes are copied from the location specified in +the previous step.
+*Changes made in workspace could be compiled and tested independently, +then copied back to the DSM2 model package folders, and finally +committed to the SVN server.
+

+
    +
  1. Create the PTM project in workspace
  2. +
+

File->New->Java Project
+
+'Project name' input e.g. ptm
+'Project layout' select as 'Use project folder as root for sources and +class files' (with src and bytecode stored at one place; the other +option is also ok for use)
+
+Next->Libraries->Add External JARs->Select 3 jar files (COM.jar, +edu.jar, xml.jar) at directory:
+D:\delta\models\dsm2_v8_0\src\ptm\lib
+Finish
+

+
    +
  1. Create Package to include source codes:
  2. +
+

Right click ptm in the Package Explorer->New->Package
+
+Input Package Name under Source folder ptm: DWR.DMS.PTM
+
+Import java source codes
+
+General->File System->Next
+From directory->Browse (DSM2 PTM Java src folders) ->OK
+
+Check PTM box on the left window (to include all the java +files)->Finish
+
+*Eclipse would automatically create 2 packages for the sub-folders:
+DWR.DMS.PTM.behave
+DWR.DMS.PTM.tools
+*If some error msg like 'access restriction' comes out: Try to remove +the JRE System library from Project ptm->Properties->Java Build +Path->Libraries, and add it again
+Compile and debug PTM

+
    +
  1. Compile source code to bytecode
  2. +
+

Project->Build Project
+
+Bytecodes (.class) are stored at the same place as source codes +(.java)
+Automatic compilation could be enabled by Project -> Build +Automatically

+
    +
  1. Run/Debug Variables Configuration
  2. +
+

Set up the PTM input study file in IDE configuration
+Menu Run/Debug-> Run/Debug Configuration
+
+Java Application->New
+
+e.g.
+Main Tab:
+Name: MainPTM
+Main Class: DWR.DMS.PTM.MainPTM
+Argument Tab:
+Program arguments: historical_ptm.inp (the sample PTM input file in a +DSM2 v8 historical study; make sure the hydro file has been run first +and h5 file exists)
+VM arguments: -ss1m -mx512m -oss1m
+Other: D:\delta\dsm2_v8\study_templates\historical
+Environment Tab:
+DSM2_HOME d:\delta\dsm2_v8
+Path d:\delta\dsm2_v8\bin
+
+
+

+
    +
  1. Run/Debug PTM
  2. +
+

Switch to Debug Mode, which enable breakpoint setting and variable +tracking
+Run->Run/Debug MainPTM for calculation checking
+

+

Export back to DSM2 package

+
    +
  1. Combine the PTM Java bytecodes and related libraries into a JAR file
  2. +
+

File->Export->Java->JAR file
+
+
+Check ptm box on the left to select all the bytecode files and +libraries
+Select the export destination: D:\workspace\ptm\ptm.jar
+

+
    +
  1. Copy the JAR file to the DSM2 distribution binary folder for + packaging:
  2. +
+

+D:\delta\models\dsm2_distribute\dsm2\ptm\lib+
+Other PTM related files, e.g. ptm.dll, are compiled in related C++ and +Fortran projects of DSM2 Visual Studio solution. Please see the document +for details.

+

Attachments:

+

+worddav44179cfae725fe5f39c7b918214b4d68.png +(image/png)
+ +worddav21d73a7e0e593bfd3b0107605d9a879c.png +(image/png)
+ +worddave3944aea30dfda849398f8b2d35e5ca0.png +(image/png)
+ +worddav166bb58986798c0c19da41ec64557bc5.png +(image/png)
+ +worddav0e06297fcdac03d480d52d4303a8f664.png +(image/png)
+ +worddav974c8dfc9bac21524c2e729c9ddf7938.png +(image/png)
+ +worddav4acaef0242e743933e42a19aac4be497.png +(image/png)
+ +worddava8b154265c7f02598531839e7c2691d2.png +(image/png)
+ +worddav92a273d21de65a713f3c72c11cdd9d2f.png +(image/png)
+ +worddavca883dcc4d073032cba8861c22f82023.png +(image/png)
+ +worddav6787134635058c6f17e54d0d92dfbceb.png +(image/png)
+ +worddava5ae1f4d3a3483dc4320496d098a0bc6.png +(image/png)
+ +worddav7728359a07b9c4738d8f2fc9e1dcbe00.png +(image/png)
+ +worddavba79a0a41e348cbfbb98c6c379e69d73.png +(image/png)
+ +worddavaa8687d7165c7477fa2fe14970938e00.png +(image/png)
+ +worddav0497cdcda22d3ff9a889885b64ed225a.png +(image/png)
+ +worddav911bf0d4648403b4cd5c945075026a73.png +(image/png)
+ +worddav7a2f929ee2f765ed2f7e2f8c617f4e9a.png +(image/png)
+ +worddavd39e409d006ca7cd1fececf0ba72f5d5.png +(image/png)
+ +worddaveec0e1cc149a8a2c30bf9d635c413987.png +(image/png)
+ +worddav769c68dd95f5a8d049cd75682818d784.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/build/Debugging_JNI_code_with_Eclipse_and_Visual_Studio_20xx_/index.html b/build/Debugging_JNI_code_with_Eclipse_and_Visual_Studio_20xx_/index.html new file mode 100644 index 00000000..f94953b4 --- /dev/null +++ b/build/Debugging_JNI_code_with_Eclipse_and_Visual_Studio_20xx_/index.html @@ -0,0 +1,576 @@ + + + + + + + + + + + + + + + + + + Debugging JNI code with Eclipse and Visual Studio (20xx) - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Debugging JNI code with Eclipse and Visual Studio (20xx)

+

This document shows how to setup an Eclipse project (e.g. PTM) with JNI +(native C/C++/Fortran code) with Visual Studio (e.g. 2015) 

+
    +
  1. +

    Use 32 bit version of Eclipse (e.g. eclipse-java-neon-2-win32) and + setup PTM project

    +
      +
    1. Browse + over to the checked out version of dsm2 and look under + dsm2/src/ptm. 
    2. +
    3. Create a debug configuration. Make sure to point to the + directory where PTM.dll is built in debug mode. E.g. + d:\dev\dsm2\master\dsm2\BUILD\Debug\ is where cmake builds the + Debug version of the projects
      +
    4. +
    +
  2. +
  3. +

    Start debug from Eclipse and make sure to pause on some line of code + before JNI code is invoked. 

    +
  4. +
  5. +

    Use Visual Studio code and make sure to be in Debug configuration. + Then attach to the running Java process in 2 using the remote attach + to process
    +  
    +

    +

    You will need to be able to identify the process in 2 by its PID or +its name. 

    +
  6. +
  7. +

    Set breakpoint in native code
    +

    +
  8. +
  9. +

    Release the paused Java code in the Eclipse debugger. When the + native code trigger is hit it will stop at the above breakpoint.

    +
  10. +
+

Attachments:

+

+image2020-11-10_13-26-14.png +(image/png)
+ +image2020-11-10_13-24-53.png +(image/png)
+ +image2020-11-10_13-22-32.png +(image/png)
+ +image2020-11-10_13-16-30.png +(image/png)
+ +image2020-11-10_13-14-39.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/build/Developer_and_Build/index.html b/build/Developer_and_Build/index.html new file mode 100644 index 00000000..01068079 --- /dev/null +++ b/build/Developer_and_Build/index.html @@ -0,0 +1,591 @@ + + + + + + + + + + + + + + + + + + Developer and Build - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Developer and Build

+

Basic steps

+
    +
  • +

    Install prerequisite softwares

    +
  • +
  • +

    Checkout DSM2 from github repository 

    +
  • +
  • +

    Compile and build input_storage and oprule libraries (may also need + third party)

    +
  • +
  • +

    Compile and build DSM2 (That should result in the hydro.exe and + qual.exe and ptm.dll in the build folders)

    +
  • +
  • +

    Test and validate the newly compiled

    +
  • +
  • +

    Copy and update DSM2 distribution package

    +
  • +
  • +

    Package for DSM2 new release

    +
  • +
+

DSM2 version 82* compilation and packaging assuming a minor (bug-fix) +release. 

+ +

Required Software

+
    +
  1. Visual Studio 2015 (check its installation and management details + at Intel Compiler Installation for + Windows)
  2. +
  3. Intel Composer for Fortran and C++ (Parallel Studio 2019)
  4. +
  5. Cmake + 3.14 https://cmake.org/ (better + use a latest stable version, not *rc)
  6. +
  7. Git https://git-scm.com/downloads
  8. +
  9. Source Tree (free git client, optional) + https://www.sourcetreeapp.com/
  10. +
  11. Flex and Bison packages in Cygwin + http://www.cygwin.com/setup.exe  (make + sure {cywin}/bin in the environment path)
  12. +
  13. Inno Setup Compiler v5.2.3 + http://files.jrsoftware.org/is/5/isetup-5.2.3.exe
  14. +
  15. Python 3 http://www.python.org/download/
  16. +
  17. Java Development Kit (32-bit)  + http://java.sun.com/javase/downloads/index.jsp
  18. +
  19. Microsoft Office 2010
  20. +
  21. Apache ANT http://ant.apache.org/bindownload.cgi
  22. +
+

Note: make sure all software has its binary, header, or library +folders set in the environment variables. (Given that Department Virtual +Machines may prohibit editing, users can still edit in their own +account. Be aware that a mismatched software version could fail some step of +compiling.)

+

+

Validation

+

To test new compiled DSM2 and see its difference from older version, the +following tools are often used:

+
    +
  • DSM2-vista Compare DSS Files + Tool requires + output settings (pathnames) exactly the same, but provides a quick + summary of accumulated difference, especially useful when we want to + confirm if two versions are the same. 
  • +
  • DSM2-vista Compare DSS + Tool is flexible + to compare between different pathnames, yet requires users set up + configuration one-by-one.
  • +
  • HEC-DSSVue has a compare function in its 'Tools' menu
  • +
+

Attachments:

+

+image2019-11-15_10-58-45.png +(image/png)
+ +image2019-11-15_10-54-29.png +(image/png)
+ +image2019-11-15_10-53-40.png +(image/png)
+ +DSM2_v8_0_PTM_Compiling_eclipse.docx +(application/vnd.openxmlformats-officedocument.wordprocessingml.document)
+ +DSM2_v8_0_Compiling_and_Packaging.docx +(application/vnd.openxmlformats-officedocument.wordprocessingml.document)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/build/Docker_Builds/index.html b/build/Docker_Builds/index.html new file mode 100644 index 00000000..37bb6b33 --- /dev/null +++ b/build/Docker_Builds/index.html @@ -0,0 +1,471 @@ + + + + + + + + + + + + + + + + + + Docker Builds - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/build/HDF5_CMake_Static_Build/index.html b/build/HDF5_CMake_Static_Build/index.html new file mode 100644 index 00000000..c4547170 --- /dev/null +++ b/build/HDF5_CMake_Static_Build/index.html @@ -0,0 +1,563 @@ + + + + + + + + + + + + + + + + + + HDF5 CMake Static Build - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

HDF5 CMake Static Build

+

DSM2 binaries are built with static links (no DLLs are needed). However +HDF5 1.8.10+ does not support static builds as there are fundamental +problems if parallel support is enabled. However DSM2 does not need the +parallel support and static builds are very convenient for us.

+

The information here was documented in  DSM2-117 - Update HDF5 library +to 1.8.19 or later Done  and the kernel of information is included here +for future HDF5 builds for static linking.

+

This blog entry explains how to build with /MT flag

+

https://blog.afach.de/?page_id=421

+

HDF5 Static (with /MT flag) compilation Auto compile script – Visual Studio

+

This is a compile script that compiles HDF5 libraries from source +statically with multithread support, i.e., “/MT” flag in Visual Studio. +automatically.

+

Warning

+

After discussing with one of the programmers of HDF5, it was made clear +that linking statically works safely only in the condition HDF5 +library wasn’t compiled with parallel support.

+

The script

+

The script involves going to the file
+config\cmake\UserMacros\Windows_MT.cmake
+and copying the file’s contents to “UserMacros.cmake”. The same is also +done for ZLib and SZip after extracting them, and rezipping them again.

+
@echo off
+::The following is the name of the folder of HDF5 source
+set "hdffolder=hdf5-1.8.16"
+
+::add a new line then add /MT compilation options
+call echo & echo. >> %hdffolder%\UserMacros.cmake
+cat %hdffolder%\config\cmake\UserMacros\Windows_MT.cmake >> %hdffolder%\UserMacros.cmake
+for %%i in (%hdffolder%\UserMacros.cmake) do sed -i "s/\"Build With Static CRT Libraries\" OFF/\"Build With Static CRT Libraries\" ON/g" %%i
+
+::add a new line then add /MT to SZip after extracting it, and then recompress it
+gzip -dc SZip.tar.gz | tar -xf -
+mv SZip.tar.gz SZip-dynamic.tar.gz
+call echo & echo. >> UserMacros.cmake
+cat SZip\config\cmake\UserMacros\Windows_MT.cmake >>SZip\UserMacros.cmake
+for %%i in (SZip\UserMacros.cmake) do sed -i "s/\"Build With Static CRT Libraries\" OFF/\"Build With Static CRT Libraries\" ON/g" %%i
+tar cf SZip.tar SZip\
+gzip SZip.tar
+rm -r SZip
+
+::do the same to ZLib
+gzip -dc ZLib.tar.gz | tar -xf -
+mv ZLib.tar.gz ZLib-dynamic.tar.gz
+call echo & echo. >> UserMacros.cmake
+cat ZLib\config\cmake\UserMacros\Windows_MT.cmake >>ZLib\UserMacros.cmake
+for %%i in (ZLib\UserMacros.cmake) do sed -i "s/\"Build With Static CRT Libraries\" OFF/\"Build With Static CRT Libraries\" ON/g" %%i
+tar cf ZLib.tar ZLib\
+gzip ZLib.tar
+rm -r ZLib
+
+build-VS2013-32.bat
+
+

Requirements

+

1- CMake (add +its executable folder to path)
+2- [GOW

+

| | +|---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| rel="nofollow">https://github.com/bmatzelle/gow/downloads]3-  href="https://www.visualstudio.com/en-us/products/visual-studio-express-vs.aspx"
rel="nofollow">Visual Studio or C++ Express
 (this you can get for free from Microsoft, but I assume you know enough about this already since you’re here) |

+

Note: If CMake won’t show in path in command prompt, run prompt as +administrator, or use this command to add the path you want to the +environment variable %PATH%
+ set PATH=C:\Program Files (x86)\CMake\bin;%PATH%
+

+

Gow is GNU tools for windows, like tar, gzip and sed. These are +important for the script.

+

Whether you’d like to have a 32-bit or 64-bit version of Visual Studio +used depends on the environment variables that are defined. The easiest +way is to run the command prompt for the version you want. For +example, in Visual Studio 2013, if one goes to Start, then types in +quick search “Visual”, you’ll find a folder called “Visual Studio +Tools”. This folder will have both command prompts with the relevant +environment variables. The following shows this folder:

+

+

Prepare to run the script

+

Go +to this +page, and download the CMake source. Extract it; put the script in a +file there; if the version you want to compile is different than the one +in the script, modify the folder name; and finally run the script. After +the script is finished, you’ll have a compressed zip file with compiled +source and an installer executable.

+

The file HDF5CompileScript.bat is where I +copied the script of compile that I created. Just run this script +through the command prompt of visual studio and it’ll compile.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/build/PTM_DLL_Static_Build/index.html b/build/PTM_DLL_Static_Build/index.html new file mode 100644 index 00000000..b6846d32 --- /dev/null +++ b/build/PTM_DLL_Static_Build/index.html @@ -0,0 +1,517 @@ + + + + + + + + + + + + + + + + + + PTM DLL Static Build - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

PTM DLL Static Build

+

On windows it is not straightforward to compile to a DLL by mixing /MT +(static) and /MD (dynamic) linking flags. However we want to create exes +(hydro, qual, gtm) that have no dependencies on system libraries at +runtime. This means we compile all libraries with /MT flags.

+

DSM2-174 - PTM DLL compiling issue Resolved

+

PTM is a dll so we have to override the libraries the compiler and +linker search for by default.

+

For the standard C/C++ libraries more information at https://docs.microsoft.com/en-us/cpp/c-runtime-library/crt-library-features?view=vs-2019

+

For the Intel libraries more information at https://software.intel.com/en-us/articles/libraries-provided-by-intelr-c-compiler-for-windows-and-intel-parallel-composer

+

The following libraries are then ignored so that the static versions of +the libraries are packaged into the .dll file itself. This increases the +size of the DLL but then during runtime there are no other dependencies +on any system or intel libraries. In other words, it can stand alone and +run.

+

The libraries ignored are 

+
msvcrt.lib;libmmd.lib; msvcprt.lib;libucrtd.lib;
+
+

If you ever need to see all the libraries being used turn the /VERBOSE +feature on the linker options in Visual Studio

+

+

Attachments:

+

+image2019-4-29_12-54-20.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/build/Troubleshooting/index.html b/build/Troubleshooting/index.html new file mode 100644 index 00000000..f03eafde --- /dev/null +++ b/build/Troubleshooting/index.html @@ -0,0 +1,513 @@ + + + + + + + + + + + + + + + + + + Troubleshooting - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Troubleshooting

+ +

Attachments:

+

+image2020-1-15_8-29-2.png +(image/png)
+ +image2020-1-15_8-21-6.png +(image/png)
+ +image2020-1-15_8-20-52.png +(image/png)
+ +image2020-1-15_8-19-39.png +(image/png)
+ +image2020-1-15_8-19-11.png +(image/png)
+ +image2020-1-15_8-18-51.png +(image/png)
+ +image2020-1-15_8-17-37.png +(image/png)
+ +image2020-1-15_8-17-16.png +(image/png)
+ +image2020-1-15_8-16-44.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/build/Troubleshooting_Eclipse/index.html b/build/Troubleshooting_Eclipse/index.html new file mode 100644 index 00000000..dec4eda0 --- /dev/null +++ b/build/Troubleshooting_Eclipse/index.html @@ -0,0 +1,467 @@ + + + + + + + + + + + + + + + + + + Troubleshooting Eclipse - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Troubleshooting Eclipse

+

Eclipse Tips

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/calibration/Calibration_Memo/index.html b/calibration/Calibration_Memo/index.html new file mode 100644 index 00000000..2ba4b6b5 --- /dev/null +++ b/calibration/Calibration_Memo/index.html @@ -0,0 +1,588 @@ + + + + + + + + + + + + + + + + + + Calibration Memo - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Calibration Memo

+

Links needed here to the Calibration Memo

+

Attachments:

+

+worddav6ba4ded1dc3ad6637d41e1cab3b0ac67.png +(image/png)
+ +worddav31e2094a0a30cbe2b2f68fd01ded456e.png +(image/png)
+ +worddav87fd53a098c286a8a765388c0fad3872.png +(image/png)
+ +worddav858bf66748366e3ac5293838851bbe82.png +(image/png)
+ +worddavadc677a457af87d9f187accca7e45d26.png +(image/png)
+ +worddavd77556e825a07cebeb566d19d30ea639.png +(image/png)
+ +worddav3935cdcf6dfd7bee0e4e0e6f048bb4b6.png +(image/png)
+ +worddav2a890e9699b30d8faddba3233e1b781e.png +(image/png)
+ +worddavecaeebb6ecfc62b86e1550341e82617a.png +(image/png)
+ +worddav5b932002973e34737ab05aec46278f41.png +(image/png)
+ +worddavc5d792e811d89f630b2b3cb9afddbdf8.png +(image/png)
+ +worddav867f30829ec80c80af38c4e36e1035ec.png +(image/png)
+ +worddav9be8d4cf703e37b5d1723218d05f472b.png +(image/png)
+ +worddavb1c809d7f14b67e2012575da1956af47.png +(image/png)
+ +worddavd6a8df921a23a973f133ab348abec003.png +(image/png)
+ +worddavf6bb9ddadcecd2bfc8abe14f18e5a7cc.png +(image/png)
+ +worddav338764269ad059ddfcaf2d744b740015.png +(image/png)
+ +worddavb3e65a876c6772d6a9ad05aca7b33c01.png +(image/png)
+ +worddav4cacbb121a7846293e65de267a4f33a8.png +(image/png)
+ +worddav98e5298510a8f5160c95d49e5f7f7bad.png +(image/png)
+ +worddav3d94a048a0fda9ef690959da7c503a0d.png +(image/png)
+ +worddav99cfce7a2d97c6e99e26dbcecb6c889b.png +(image/png)
+ +worddav97f65dbaf0d433fd0b85fb28edce59d2.png +(image/png)
+ notes +for calibration refine.txt +(text/plain)
+ +Memo_DSM2_V8.1Beta_Calibration.docx +(application/vnd.openxmlformats-officedocument.wordprocessingml.document)
+ +Hydro_Calibration_Stations.jpg +(image/jpeg)
+ +Hydro_calibration_notes.txt +(text/plain)
+ +flow_2009.pdf (application/pdf)
+ +flow_2008.pdf (application/pdf)
+ +flow_2007.pdf (application/pdf)
+ +flow_2002.pdf (application/pdf)
+ +EC_calibration_notes.txt +(text/plain)
+ +EC_Calib_station_2001_2008.jpg +(image/jpeg)
+ +EC.pdf (application/pdf)
+ +stage_2009.pdf (application/pdf)
+ +stage_2008.pdf (application/pdf)
+ +stage_2007.pdf (application/pdf)
+ +Stage_2002.pdf (application/pdf)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/calibration/Mini_Calibration_2009_/index.html b/calibration/Mini_Calibration_2009_/index.html new file mode 100644 index 00000000..3a3f8dfb --- /dev/null +++ b/calibration/Mini_Calibration_2009_/index.html @@ -0,0 +1,498 @@ + + + + + + + + + + + + + + + + + + Mini Calibration (2009) - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/dsm2_learning_series/index.html b/dsm2_learning_series/index.html new file mode 100644 index 00000000..0c0115ed --- /dev/null +++ b/dsm2_learning_series/index.html @@ -0,0 +1,551 @@ + + + + + + + + + + + + + + + + + + + + + + Planning Studies - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 Learning Series

+

The DSM2 Learning Series is a series of hybrid live and online classes held by the Delta Modeling Section.

+

Quick Start

+

The DSM2 Quick Start Training was held in 2023. +

+The slides shown in the training are available on our GitHub repository, and videos of the training are available on our Youtube playlist.

+

DSM2 Planning Studies

+

The DSM2 Planning studies training will be held in October 2023. +

+The slides shown in the training will be available on our Github repository, and videos of the training will be available on our Youtube playlist.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/faqs/DSM2_-_How_to_read_hdf5_output_files/index.html b/faqs/DSM2_-_How_to_read_hdf5_output_files/index.html new file mode 100644 index 00000000..0e962202 --- /dev/null +++ b/faqs/DSM2_-_How_to_read_hdf5_output_files/index.html @@ -0,0 +1,540 @@ + + + + + + + + + + + + + + + + + + DSM2 - How to read hdf5 output files - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 - How to read hdf5 output files

+

DSM2 writes output in HDF5 format. This format can be read by Vista and +vscript 

+

Step-by-step guide

+

To open a HDF5 file

+
    +
  1. Open Vista 
  2. +
  3. Drag and drop or use Session > Open > Tidefile from the menu + options
  4. +
  5. Select the data items needed and use the  Data > Export > Export + Data To DSS menu item to export the Data to DSS files
  6. +
+

Video of How to read DSM2 - HDF5 files using VISTA

+

Alternatively here is a snippet of vscript code that does something +similar

+

Getting average concentrations from Qual HDF5 file

+
from vtidefile import opentidefile
+from vdss import writedss
+from vutils import *
+import vdisplay
+from vdisplay import plot
+import sys
+import string
+
+def get_avg_conc(tidefile, chan, twstr):
+    tf=opentidefile(tidefile)
+    if twstr != None:
+        print 'Timewindow: %s'%twstr
+        tw=timewindow(twstr)
+    else:
+        tw=None
+    refs=tf.find(['','^%s$'%chan,'AVG CONC'])
+    if refs and len(refs)==1:
+        print "Getting data %s"%(str(chan))
+        if tw!=None:
+            ref=DataReference.create(refs[0],tw)
+        else:
+            ref=refs[0]
+        return ref.data
+    else:
+        raise "No data found for %s in file %s"%(chan, tidefile)
+if __name__ == '__main__':
+    if len(sys.argv) != 2:
+        print "Usage: vscript 
+    tidefile=sys.argv[1]
+    twstr="01JUL2014 0000 - 01AUG2014 0000"
+    chans=[291,290,436,435,434,433]
+    chan_concs=[]
+    for chan in chans:
+        chan_concs.append(get_avg_conc(tidefile, chan, twstr))
+
+    for conc in chan_concs:
+        plot(conc)
+
+

DSM2-vista also supports export data to hec-dss format (One or multiple +timeseries path could be selected) See the following menu option as +example. 

+

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/faqs/DSM2_Seems_to_indicate_missing_irregular_gate_position_data/index.html b/faqs/DSM2_Seems_to_indicate_missing_irregular_gate_position_data/index.html new file mode 100644 index 00000000..d1218d92 --- /dev/null +++ b/faqs/DSM2_Seems_to_indicate_missing_irregular_gate_position_data/index.html @@ -0,0 +1,728 @@ + + + + + + + + + + + + + + + + + + DSM2 Seems to indicate missing irregular (gate position) data - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 Seems to indicate missing irregular (gate position) data

+

Problem: Hydro displays the following.

+ + + + + + +

Error in reading time-varying data:
+Current time is 01SEP2001 2400; earliest data time for
+/HIST+GATE/MTZSL/BOATLOCK_OP//IR-DECADE/DWR-ESO/
+is 

+


+

+ +

Brad Tom Related to Jira issue. I think we should open an issue there as +you are doing the practical fix for this known +issue 

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
KeySummaryTCreatedUpdatedDueAssigneeReporterPStatusResolution
DSM2-106Timeseries interpolation[<img
src="http://msb-jira/secure/viewavatar?size=xsmall&avatarId=10303&avatarType=issuetype"
class="icon" alt="Bug" />](http://msb-jira/browse/DSM2-106?src=confmacro)Nov 30, 2011Jan 03, 2022Nicky SandhuInes Ferreira<img src="http://msb-jira/images/icons/priorities/medium.svg"
class="icon" alt="Medium" />ResolvedWon't Do
+

1 issue

+

But there are data values in this time series with dates before the +current date.

+

The problem is: When using an IR-DECADE dss path, there must be a value +with a timestamp that is at the beginning of the current decade. In this +case, a record is required that has a timestamp of 31DEC2000 2400.

+

To fix this:

+ + + + + + + + + + + + + + + +
+
    +
  1. Tabulate the data in HEC-DssVue. There is no beginning of decade +timestamp.
  2. +
+

+
+

2. Turn on "Allow Editing"

+

+
+

3. Select the row before the end of the previous decade, and select +"Insert Rows":

+

+
+

4. Change "Number Rows" to 1.

+

+
+

5.Enter the timestamp that is needed, with a value equal to the value +in the previous record: 

+

+
+

6. Save the data:

+

+
+ +

Attachments:

+

+image2020-3-25_11-0-58.png +(image/png)
+ +image2020-3-25_10-39-44.png +(image/png)
+ +image2020-3-25_10-39-7.png +(image/png)
+ +image2020-3-25_10-38-32.png +(image/png)
+ +image2020-3-25_10-38-16.png +(image/png)
+ +image2020-3-25_10-37-42.png +(image/png)
+ +image2020-3-25_10-36-6.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/faqs/DSM2_inputs_are_off_by_1DAY/index.html b/faqs/DSM2_inputs_are_off_by_1DAY/index.html new file mode 100644 index 00000000..59fce41e --- /dev/null +++ b/faqs/DSM2_inputs_are_off_by_1DAY/index.html @@ -0,0 +1,589 @@ + + + + + + + + + + + + + + + + + + DSM2 inputs are off by 1DAY - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 inputs are off by 1DAY

+

DSM2-241 - Check daily inflows and warn if INST-VAL Open

+

A problem was reported with a run that was supposed to be a historical +run with inflows scaled up by a factor. 

+ + + + + + + + + + + +
+
    +
  1. Here's a plot of model output at Vernalis
  2. +
+
    +
  • Blue=historical stage output
  • +
  • Red=scaled up stage output
  • +
  • Green=historical flow output
  • +
  • Black=scaled up flow output
  • +
+

On February 14, 1992, both the scaled up flow and stage outputs are +higher than historical.

+

+
+

2. The problem was caused by the "Type" of the scaled up flow input +time series. The user had created a series with a type of INST-VAL.

+

This results in changes in inflow taking effect at the end of the day +rather than the beginning of the day.

+

One way to tell that a 1DAY time series is PER-AVER vs INST-VAL is +that HEC-DSSVue plots PER-AVER time series as a square wave, but not +INST-VAL.

+

+
+


+

+

3. To check Type and to change it, in HEC-DSSVue, right click +on the series and select Edit, and 

+

+

use the dropdown to select a new type.

+

+

4. After re-running with the change, +results are as expected.

+

+
+


+

+
+ +

Attachments:

+

+image2020-1-15_8-19-11.png +(image/png)
+ +image2020-1-15_8-20-52.png +(image/png)
+ +image2020-1-15_8-21-6.png +(image/png)
+ +image2020-1-15_8-29-2.png +(image/png)
+ +image2020-1-15_8-19-39.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/faqs/Generating_DSM2_Hydro_boundary_conditions_from_the_DCR_2017_CALSIM_II_output/index.html b/faqs/Generating_DSM2_Hydro_boundary_conditions_from_the_DCR_2017_CALSIM_II_output/index.html new file mode 100644 index 00000000..494495eb --- /dev/null +++ b/faqs/Generating_DSM2_Hydro_boundary_conditions_from_the_DCR_2017_CALSIM_II_output/index.html @@ -0,0 +1,517 @@ + + + + + + + + + + + + + + + + + + Generating DSM2 Hydro boundary conditions from the DCR 2017 CALSIM II output - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Generating DSM2 Hydro boundary conditions from the DCR 2017 CALSIM II output

+

run DCR 2017 with CWF script.zip

+

I tried to generate the DSM2 Hydro boundary conditions from the DCR 2017 +CALSIM II output (with a 2020 development level).  But for this CALSIM +II output, we don’t have a corresponding script that can be used to +generate the boundary conditions. The closest script available is from +CALWATERFix for the previous version of CALSIM II output (with a 2005 +development level).  So I used this script.  After some basic edits +(e.g., change directory, file name, etc.),  the script ran and generated +the boundary conditions.  However, because the script and the CALSIM II +output are not paired, using the old script for the new output could +introduce errors.  I compared the boundary conditions generated from DCR +2017  with those for CALWATERFix (generated with the same script but +from an older version of CALSIM output).  The patterns match, but +there are noticeable differences at some spots (e.g., Aug 24 1994 or +July 19 2001).  I haven't figured out what exactly caused the +differences, but it could be due to the different assumptions used in the CALSIM +II studies.  

+

I have a read me file inside of the zip file to provide the instruction +about how to run the script.  The zip file is too big to upload so I +left two files out.  You can download the left out files here:

+
    +
  1. this file should be in .\run DCR 2017 with CWF + script\timeseries Planning_Tide_82years.zip
  2. +
  3. this file should be in .\run DCR 2017 with CWF + script\studies\planning\timeseries\CALSIM2020D09EDV__2017DCR_OldANN_NewWSIDI-SWPDemand_x64_20171115.zip
  4. +
+

Attachments:

+

+2020D09EDV__2017DCR_OldANN_NewWSIDI-SWPDemand_x64_20171115.zip +(application/zip)
+ +Planning_Tide_82years.zip +(application/zip)
+ run DCR +2017 with CWF script.zip +(application/zip)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/faqs/How-to_articles/index.html b/faqs/How-to_articles/index.html new file mode 100644 index 00000000..44b4064a --- /dev/null +++ b/faqs/How-to_articles/index.html @@ -0,0 +1,513 @@ + + + + + + + + + + + + + + + + + + How-to articles - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/faqs/How_does_DSM2-Hydro_calculate_channel_volume_/index.html b/faqs/How_does_DSM2-Hydro_calculate_channel_volume_/index.html new file mode 100644 index 00000000..c6ae2273 --- /dev/null +++ b/faqs/How_does_DSM2-Hydro_calculate_channel_volume_/index.html @@ -0,0 +1,528 @@ + + + + + + + + + + + + + + + + + + How does DSM2-Hydro calculate channel volume? - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

How does DSM2-Hydro calculate channel volume?

+
    +
  • Hydro creates virtual cross-sections by interpolating cross-section + input (see Tutorial 1: Channels) + to create virtual cross-sections. Virtual cross-sections are created + and used internally in Hydro by interpolating cross-section input.
  • +
  • Virtual cross-sections are usually not seen by the user. If the + variable printlevel >= 5 in the SCALAR input section, virtual + cross-sections will be written to output .hof file.
  • +
  • The 2012 Annual Report describes a change in the way volume is + calculated: it used to use only the area of the cross-section in the + middle of a computational reach, but now it uses all 3 of the + cross-sections in a computational reach. Also, it describes an + important change to the longitudinal interpolation used to create + virtual cross-sections.
  • +
  • The 2016 Annual Report, section 3.4.2 indicates that the volume of a + channel is calculated by multiplying the average of two + cross-sectional areas by the distance between them. This process + would then be repeated twice for each computational reach to find + the volume.
  • +
  • Hydro will not converge well if cross-sectional area is not + interpolated correctly. Previously, area at a given elevation + between cross-section layers was calculated by interpolating area + linearly between two layers. It has been changed to a = + a1+(.5*(w1+w2))*h, where
      +
    • a1 = area at lower elevation
    • +
    • w1 = width at lower elevation
    • +
    • w2 = width at higher elevation
    • +
    • h = distance from lower elevation to given elevation
    • +
    +
  • +
+

References

+

Annual reports can be found here.

+

Ferreira I. and Sandhu, N. 2016 "Chapter 3: DSM2 Extension: A GIS-Based +Approach."  In: Methodology for Flow and Salinity Estimates in the +Sacramento-San Joaquin Delta and Suisun Marsh. 37th Annual Progress +Report to the State Water Resources Control Board. California Department +of Water Resources.

+

Liu L., Ateljevich E., and Sandhu P. 2012. “Chapter 2: Improved Geometry +Interpolation in DSM2-Hydro.” In: Methodology for Flow and Salinity +Estimates in the Sacramento-San Joaquin Delta and Suisun Marsh. 33rd +Annual Progress Report to the State Water Resources Control Board. +California Department of Water Resources.

+

Tom B. 1998. “Chapter 6: Cross-Section Development Program.” In: +Methodology for Flow and Salinity Estimates in the Sacramento-San +Joaquin Delta and Suisun Marsh. 19th Annual Progress Report to the State +Water Resources Control Board. California Department of Water Resources.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/faqs/PTM_Frequently_Asked_Questions_FAQ_/index.html b/faqs/PTM_Frequently_Asked_Questions_FAQ_/index.html new file mode 100644 index 00000000..e49a11a8 --- /dev/null +++ b/faqs/PTM_Frequently_Asked_Questions_FAQ_/index.html @@ -0,0 +1,596 @@ + + + + + + + + + + + + + + + + + + PTM Frequently Asked Questions (FAQ) - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

PTM Frequently Asked Questions (FAQ)

+

Moving from ptm source directory in github. This should belong in the +documentation and not buried in source code

+

Q1. What is PTM?

+

PTM is Particle Tracking Model. It is written partly in Java and partly +in Fortran. I/O is mainly handled by Fortran.

+

Q2. What are the inputs to PTM?

+

1. Hydrodynamic information:
+This is the dynamic information about flow, flow-area, (therefore +velocity) and depth. This information comes from the tidefile which is +generated by hydro. One has to make sure that in addition to the +tidefile the correct network configuration is being used.  This +information is typically done by the following io structure.

+

TIDEFILE
+START_DATE START_TIME END_DATE END_TIME FILENAME
+generic none length none tidefile.out
+END

+

2. Network configuration:
+This defines how channels and reservoirs are linked up and what their +characteristics, such as x-section, length, etcetera, are. This also is +read by Fortran. - channels.inp, xsects.inp, junctions.inp, +reservoirs.inp, translations.inp

+

Refer to DSM2 docs

+

3. Particle information:

+

a. Type of particle:
+Until now we have been dealing only with neutrally-buoyant particles or particles +with a certain falling velocity. For other kinds of particles, such as +fish, no IO had been decided.

+

b. Particle insertion information:
+Number of particles, time of insertion, location of insertion and +duration of insertion. Refer to ptm_insertion.inp.

+

PARTINP
+[NODE NPARTS SDATE STIME EDATE ETIME] |
+44 500 01jan1990 0133 05jan1990 0333

+

This means that insert 500 particles at 44 evenly distributed from start +time to end time

+

[NODE NPARTS SDATE LENGTH]
+44 600 01jan1990 5days
+END

+

c. Run time information

+

This is similar to run time settings for hydro and qual. Refer to DSM2 +docs.

+

d. PTM has the following scalars

+

SCALAR
+ptm_time_step 15min # PTM time step
+display_intvl 1hour # how often to display run progress
+ptm_ivert t # Use Vertical velocity profile
+ptm_itrans t # Use Transverse velocity profile
+ptm_iey t # Use transverse mixing
+ptm_iez t # Use vertical mixing
+ptm_fallvel 0.05 # settling velocity in ft/s
+ptm_random_seed 32001 # Starting Random Number Seed
+ptm_trans_constant 0.06 # Transverse Mixing Constant
+ptm_vert_constant 0.0067 # Vertical Mixing Constant
+END

+

d. IO from PTM

+

IO_FILES
+MODEL TYPE IO INTERVAL FILENAME
+ptm anim out 15min anim.bin # animation file
+ptm trace out none trace.bin # trace file
+ptm restart out 6hours restart.out # restart output file
+ptm restart in none restart.inp # restart input file
+END

+

Animation file:
+Contains the data for the first 100 particles' movement at every time +interval as specified. This is a binary file if the file name does not +end in ".out"; otherwise it will be an ascii file. One can use PTM Animator to +run the binary file to look at the animation visually

+

Trace file:
+The trace file contains the trace of every particle in the system. It +records the entrance/exit of a particle into a waterbody, such as a +channel, etcetera. ".out" for an ascii file. The trace file is used to +calculate the flux, and so the flux may be calculated after the PTM run.

+

Restart file:
+This is a snapshot of the current locations of every particle inthe +system. Useful mainly for restarting a run from a previously saved +state.

+

Flux information:
+PARTICLE_FLUX
+FROM_WB TO_WB INTERVAL FILENAME B_PART
+chan, 216 | qext, cvp | 15min flux.txt past_CVP
+res,clfct | qext,swp | 15min flux.txt past_SWP
+chan,436,53 | chan,442,437 | 15min flux.txt past_Chipps
+chan, 441 | stage, mtz | 15min flux.txt past_MTZ
+| qext,div,-cvp, -ccc | 15min flux.txt Ag_Diversions
+| qext,div | 15min flux.txt All_Diversions
+END

+

This instructs ptm to calculate flux from the trace file. This can be +done at the end of a ptm run or from a trace file later. It is the user's +responsibility to provide the correct network configuration. The filename +flux.txt means the user wants the output in ascii format; otherwise it would +be flux.dss, which is output in dss format. The DSS pathname's B part == +B_PART from the above table.

+

The interval at which the flux is calculated is as given above in the

+

INTERVAL column

+

The particle flux is calculated from a waterbody to a waterbody. A +waterbody is specified by a type string followed by an identifier. The +type string is one of chan, res, qext, stage.

+

The identifier is either a number for the chan or a name as defined +in the translations.inp file. If a generic type follows in place of the +identifier, then the flux tracks all particles entering or leaving that +particular type of waterbody. If no from_wb or to_wb is defined, it is
+assumed to apply to all waterbodies.

+

Particle dynamics:

+

The particle can move in x, y and z directions. However as DSM2 is a 1-D +model this information is gleaned by applying a vertical and transverse +velocity profile to the average velocity available from the tidefile.

+

A particle has a deterministic and random component to its movement.

+

Deterministic component == Average velocity + transformations

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/faqs/index.html b/faqs/index.html new file mode 100644 index 00000000..6e3193d8 --- /dev/null +++ b/faqs/index.html @@ -0,0 +1,535 @@ + + + + + + + + + + + + + + + + + + + + + + FAQs - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + + + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/getting_started/index.html b/getting_started/index.html new file mode 100644 index 00000000..da50d29e --- /dev/null +++ b/getting_started/index.html @@ -0,0 +1,634 @@ + + + + + + + + + + + + + + + + + + + + + + Getting Started - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Getting Started

+

Welcome to DSM2. This section of the documentation is intended to help +you acquire and install dsm2, test that it is working, and understand the +layout of the distribution. After you have taken these steps, you will +probably want to tackle the tutorials in the /tutorials folder of the +distribution or consult the documentation or grid map in the +/documentation folder -- a link has been provided on the start menu to +make the documentation easier to find.

+

Getting DSM2

+

DSM2 is distributed by the California Department of Water Resources +Delta Modeling Section. You can find the model at the CNRA Open Data web site. +Currently we distribute Windows executables, tutorials, +source code, and a data package with templates for common studies.

+

Installing DSM2

+

DSM2 has been tested on Windows 10.
+DSM2 is distributed as a .zip file, which contains the model executables and input files.
+You should not unzip it to a location with spaces.
+We recommend D:\delta\dsm2 or C:\delta\dsm2. +Unzip it to a drive with a lot (gigabytes) of room. This will make it +easier to use in-place

+ +

DSM2 comes with a numerical model and scripting capabilities. It is +easier to use the model if you also have a text editor with syntax +highlighting, a tool for differencing text files ("diff tool"), a DSS +viewer and an hdf5 viewer.

+

Open command window here: Follow the instructions here to +add the option 'Open command window here' to the Windows Explorer context menu. You will need administrative privileges to do this, and you +should only do this if you are comfortable modifying the registry in Windows 10. This will allow you to open +a command window by right clicking on a folder in Windows explorer. DSM2 models and Python +scripts can be run in the command window. The tool is essential for working +with DSM2 efficiently.
+Notepad++ is a text editor that works well with DSM2 input data and +integrates nicely into the Windows file explorer. We support the editor +with syntax highlighting. Here are some instructions for configuring +Notepad++
+DiffMerge is a good free differencing tool for text files. Beyond +Compare is an inexpensive commercial product that is intuitive and also +compares Word files.
+Vista, one of the first graphical tools for examining data in HEC-DSS +format, comes with DSM2 in the /dsm2/vista/bin directory.
+HEC-DSSVUE is distributed by HEC and is actively maintained. Most people +use DSSVUE as their primary tool with Vista for specific tasks. An Excel +add-in for DSS data is also available on the HEC web page.
+HDFView and HDF-Explorer are two independent browsers for the HDF5 file +format. This lets you look inside a tidefile, one of the outputs of the +model. You only need one of them.

+

Test Launching DSM2

+

The first step is to see whether the installation was successful. To do +this, get a DOS-style command prompt window and type from any location:

+
C:\>hydro -v
+DSM2-Hydro 8.2.2  Git Version: 1468 Git GUI: 54a9cc3c
+Usage: Hydro input-file
+
+

If you got a message like the one above, you are up and running!

+

If instead you get this:

+
C:\>hydro -v
+'hydro' is not recognized as an internal or external command,
+operable program or batch file.
+
+

...you have a path problem and we need to straighten it out.

+

Your next stop should be to read the Tutorials.
+The Basic Tutorials (Tutorial 1-6) feature most of the nuances of the model using a very simple grid +and are an excellent way to learn about the model -- including +subtleties that are new or have caused confusion in the past. The Delta +Tutorial series are more applied -- tasks on the Delta. Doing some of +Delta Tutorial #1 as a motivator, and then tackling the simple ones is a +quick way to get a sense of the model.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/CSDP_Network_File_Format/index.html b/gis/CSDP_Network_File_Format/index.html new file mode 100644 index 00000000..75a3d3b0 --- /dev/null +++ b/gis/CSDP_Network_File_Format/index.html @@ -0,0 +1,547 @@ + + + + + + + + + + + + + + + + + + CSDP Network File Format - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

CSDP Network File Format

+

The CSDP network file stores centerlines, cross-section lines, and +cross-section points. The CSDP uses the information in the file to +create cross-section input for DSM2.

+

The format of the CSDP Network file predates the CSDP. It was created by +the consultant John Crapuchettes, who created the Bathymetry Data +Display (BDD) application, which was the predecessor to the CSDP.

+

See comments for explanations of the various lines in the file.

+

;HorizontalDatum: UTMNAD83
+;HorizontalZone: 10
+;HorizontalUnits: Meters
+;VerticalDatum: NAVD88
+;VerticalUnits: USSurveyFeet
+;Filetype: network
+;NumElements: 525
+"1" 18
+2140064.75,1.3689134E7
+2139796.0,1.3689089E7
+2139320.0,1.3689424E7
+2139205.25,1.3689698E7
+2139663.75,1.3690571E7
+2140078.25,1.3690641E7
+2140395.75,1.369087E7
+2140713.0,1.3691585E7
+2140713.0,1.3691928E7
+2140589.75,1.3692493E7
+2139928.25,1.3693401E7
+2139275.75,1.3693904E7
+2139240.5,1.3694195E7
+2139390.5,1.3694503E7
+2139822.5,1.3695402E7
+2139954.75,1.3695905E7
+2140422.0,1.369669E7
+2140480.5,1.3696834E7
+3
+"" 8
+-215.90325927734375,20.427509307861328
+-157.9310302734375,12.60617733001709
+-54.482757568359375,6.409266471862793
+92.41378784179688,-3.2046332359313965
+191.72413635253906,1.6023166179656982
+315.862060546875,4.247311592102051
+346.89654541015625,11.737451553344727
+458.6206970214844,20.54054069519043
+117.23826599121094 890.1288452148438
+"BT 8/12/2019: cloned from adjacent cross-section to prevent +interpolation to improve max area ratio"
+"" 8
+-355.90325927734375,20.427509307861328
+-297.9310302734375,12.60617733001709
+-194.48275756835938,6.409266471862793
+-47.58620834350586,-3.2046332359313965
+51.72413635253906,1.6023166179656982
+175.86207580566406,4.247311592102051
+206.89654541015625,11.737451553344727
+318.6206970214844,20.54054069519043
+662.5869750976562 903.2123413085938
+"KH,1/30/2019: moved the centerline to better line up with the most +recent survey data; re-created the cross-sections"
+"" 8
+-385.9397277832031,32.054264068603516
+-220.38327026367188,14.89919376373291
+-121.08013916015625,6.431451797485352
+-27.00348472595215,1.6935484409332275
+54.0069694519043,-2.036290407180786
+150.69686889648438,-0.32258063554763794
+218.64111328125,19.435483932495117
+341.4634094238281,34.15322494506836
+9270.8427734375 1192.212646484375
+"KH,1/30/2019: moved the centerline to better line up with the most +recent survey data; re-created the cross-sections *nl* *nl* BT +7/24/2019: adjusted to prevent drying up"

+

"2" 11
+2140424.0,1.3696842E7
+2141014.75,1.3698247E7
+2141700.5,1.369945E7
+2143323.0,1.3700658E7
+2143420.0,1.3701319E7
+2142970.25,1.3701777E7
+2141859.25,1.3701848E7
+2140686.5,1.3701619E7
+2139679.75,1.3701361E7

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/CSDP_Network_Summary_Report/index.html b/gis/CSDP_Network_Summary_Report/index.html new file mode 100644 index 00000000..6f91473a --- /dev/null +++ b/gis/CSDP_Network_Summary_Report/index.html @@ -0,0 +1,749 @@ + + + + + + + + + + + + + + + + + + CSDP Network Summary Report - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

CSDP Network Summary Report

+

The CSDP Network Summary Report is created by the CSDP. It can be used +to help identify issues and potential issues with cross-sections in the +currently loaded network file.

+

It also contains important comparisons of DSM2 Virtual Cross-Section +volume with GIS calculated volumes.

+

A network summary report uses the following input files:

+
    +
  1. An existing channels.inp file (such as + channel_std_delta_grid_NAVD_20150129.inp). This file is used to get + existing channel lengths for comparison, and to determine channel + connectivity.
  2. +
  3. The currently loaded network file.
  4. +
  5. A DSM2 output (.hof) file which was created from the network + file by running DSM2-Hydro with geometry created using the currently + loaded network file with printlevel>=5
  6. +
  7. A 2m meter DEM CutFill validity file, which was created + based upon a visual inspection of the extent of the coverage of + channel polygons with data in the 2m DEM files, using ArcGIS. If + coverage is complete or very nearly complete, the validity is true.
  8. +
  9. CutFill results files, each containing results from the CutFill + operations for a given DEM.
  10. +
  11. (Optional): a list of channel groups. Default is the list of + groups for which polygons were created and used in the CutFill + operations: "448_449_572_573, 439_440_441_451_452_453_454, + 438_443_444_450_570_571_574_575,290-294,281_282_295_296_297_301". + You can add to this list.
  12. +
+

The report contains, for a given stage (usually 0.0 NAVD)

+
    +
  1. Channel: The name/number of the DSM2 channel. Could also be a + group of channels. Examples: Sherman Lake would be identified as: + 290-294, Grizzly Bay would be identified as: 448_449_572_573
  2. +
  3. Comparison of channels.inp length vs CSDP length:
      +
    1. Channels.inp length: length specified for DSM2 in the DSM2 + channels file above.
    2. +
    3. CSDP length: length calculated by the CSDP that will be used + to replace the 'Channels.inp length'.
    4. +
    5. % change: the change in length CSDP vs Channels.inp
    6. +
    +
  4. +
  5. CSDP Average width: For determining GIS volume + estimate validity–average width should be at least 3 times the DEM + grid size.
  6. +
  7. If CSDP Volume is significantly different from DSM2 Volume, that + would mean the effects of interpolation should be considered when + modifying cross-sections.
      +
    1. CSDP Volume: Channel volume calculated by CSDP for specified + elevation assuming no inter-channel interpolation. Not used for + comparison, but may be of interest to some.
    2. +
    +
  8. +
  9. Not used for comparison, but may be of interest to some.
      +
    1. CSDP Wetted Area: Wetted area calculated by CSDP for + specified elevation assuming no inter-channel interpolation. Not + used for comparison, but may be of interest to some.
    2. +
    3. CSDP Surface Area: Surface area calculated by CSDP for + specified elevation assuming no inter-channel interpolation.
    4. +
    5. CSDP Max Area Ratio: The maximum ratio of cross-sectional + areas within a channel using CSDP cross-sections. Important + for numerical stability. Max area ratios should be \< 2.0.
    6. +
    +
  10. +
  11. If CSDP Volume is significantly different from DSM2 Volume, that + would mean the effects of interpolation should be considered when + modifying cross-sections.
      +
    1. DSM2 Volume: Channel volume calculated at specified + elevation using virtual cross-sections from DSM2 output file. + Used for comparison with GIS volumes.
    2. +
    +
  12. +
  13. Not used for comparison, but may be of interest to some:
      +
    1. DSM2 Wetted Area: Wetted area calculated at specified + elevation using virtual cross-sections from DSM2 output file
    2. +
    3. DSM2 Surface Area: Surface area calculated at specified + elevation using virtual cross-sections from DSM2 output file
    4. +
    5. DSM2 Max Area Ratio: The maximum ratio of cross-sectional + areas within a channel using virtual cross-sections. + Important for numerical stability. Max area ratios should be + \< 2.0.
    6. +
    +
  14. +
  15. These results include valid and invalid values (see "2m Validity" + and "10m Validity" below), so these are probably not what you want + to use.
      +
    1. GIS 2m Max* Volume: The GIS calculated channel volume, + converted to ft3, using 2m DEM.
    2. +
    3. GIS 2m Max* Area: The GIS calculated 2d area, converted to + ft2, using 2m DEM.
    4. +
    5. GIS 10 Max* Volume: The GIS calculated channel volume, + converted to ft3, using 10m DEM.
    6. +
    7. GIS 10m Max* Area: The GIS calculated 2D area, converted to + ft2, using 10m DEM.
    8. +
    9. DSM2-2m Vol: The difference between the DSM2 virtual + cross-section volume and the 2m DEM volume.
    10. +
    11. DSM2-10m Vol: The difference between the DSM2 virtual + cross-section volume and the 10m DEM volume.
    12. +
    13. 2m Vol % diff: The % difference between the DSM2 virtual + cross-section volume and the 2m DEM volume.
    14. +
    15. 10m Vol % diff: The % difference between the DSM2 virtual + cross-section volume and the 10m DEM volume.
    16. +
    17. CSDP Avg Width: The average width of all the CSDP + cross-sections in a channel at the specified elevation.
    18. +
    +
  16. +
  17. 2m Width Ratio: the CSDP Avg Width / 2m.
  18. +
  19. 10m Width Ratio: the CSDP Avg Width / 10m.
  20. +
  21. 2m Validity: 2m DEM volume and area calculations will be + considered valid if a 2m DEM covers (or nearly covers) the entire + channel polygon, and the 2m Width Ratio >= 3.0.
  22. +
  23. 10m Validity: 10m DEM volume and area calculations will be + considered valid if the 10m Width Ratio >= 3.0. Coverage is + assumed to be complete for all channels.
  24. +
  25. Valid Values: These are the ones you want to use:
      +
    1. Valid 2m Vol: The value of GIS 2m Volume, if 2m + Validity==true, null otherwise.
    2. +
    3. Valid 10m Vol: The value of GIS 10m Volume, if 10m + Validity==true, null otherwise.
    4. +
    5. DSM2-Valid 2m Vol: The value of DSM2-2m Vol if 2m + Validity==true, null otherwise.
    6. +
    7. DSM2-Valid 10m Vol: The value of DSM2-10m Vol if 10m + Validity==true, null otherwise.
    8. +
    9. Valid 2m Vol % diff: The value of 2m Vol % diff if 2m + Validity==true, null otherwise.
    10. +
    11. Valid 10m Vol % diff: The value of 10m Vol % diff if + 10m Validity==true, null otherwise.
    12. +
    +
  26. +
  27. CSDP highest bottom elevation: The highest bottom elevation of + all the cross-sections within the channel. Can help identify + cross-sections that are likely to dry up.
  28. +
  29. CSDP XS with no points: The indices of the cross-sections in the + channel that have no points. These cross-sections should be removed + or edited.
  30. +
  31. CSDP XS within 500.0 feet: The indices of the cross-sections in + the channel that are within 500.0 feet of each other. This could + help identify duplicate cross-sections or unnecessary + cross-sections. 
  32. +
  33. These can help identify cross-sections that need to be adjusted to + improve Max Area Ratio.
      +
    1. CSDP XS with Min area: The index of the cross-section in the + channel that has the smallest area at the specified elevation
    2. +
    3. CSDP XS with Max area: The index of the cross-section in the + channel that has the largest area at the specified elevation
    4. +
    +
  34. +
  35. CSDP XS with duplicate stations: The indices of the + cross-sections in the channel that have duplicate station values. + These cross-section need to be fixed.
  36. +
  37. We no longer care about negative dConveyance, so these can + probably be ignored:
      +
    1. CSDP XS with -dK: The indices of the cross-sections in the + channel that have negative dConveyance at any elevation.
    2. +
    3. CSDP XS with -dK in intertidal zone: the indices of the + cross-sections in the channel that have negative dConveyance in + the intertidal zone. (intertidal zone is assumed to be limited + to the range -2.5 \< Z \< 17.5 ft NAVD88)
    4. +
    +
  38. +
+

*When calculating GIS results, some channels overlap more than one +DEM.  When this happens, only the largest values of Volume and 2D Area +are used, because they are assumed to be associated with the DEM that +covers a greater portion of the polygon. If the coverage is not +complete, the value should be invalidated visually in the "2m DEM +Validity" file.

+

Creating the network summary report:

+
    +
  1. Load a bathymetry file.
  2. +
  3. Load or create a network file.
  4. +
  5. Select Network->Reports→Network Summary Report
  6. +
  7. In the following dialog: 
    +
  8. +
  9. Either
      +
    1. click the "Load Dialog Values" button to populate the dialog + using values read from a file, OR
    2. +
    3. Populate the dialog one field at a time by clicking the "Select + File" buttons to specify
        +
      1. the channels.inp file (in the current DSM2 setup, this is + channel_std_delta_grid_NAVD-20121214.inp),
      2. +
      3. optionally a .hof file created by running DSM2 with + printlevel=5,
      4. +
      5. A string representing an array of channels to use for + aggregating results (for example, all the channels + representing Grizzly Bay). This string can contain lists of + channels for which polygons were created for the GIS CutFill + operation, or a custom list of channels.
      6. +
      7. A list of all files containing CutFill results from GIS,
      8. +
      9. A 2m DEM CutFill Validity file, which I created by visually + inspecting the DEM coverage of polygons.
      10. +
      11. An output path
      12. +
      +
    4. +
    +
  10. +
  11. The results will be written to a tab delimited .txt file specified + above. Import the file into Excel, specifying tab as a delimiter.
  12. +
  13. After the results are written, another window will appear containing + graphs of the results.
  14. +
  15. Save results if desired.
  16. +
+

Here is the current network summary report:

+

There are many rows above the table which define the various quantities. +You may want to hide these rows when using the spreadsheet.

+

+

Attachments:

+

+image2019-3-26_14-13-18.png +(image/png)
+ +image2019-3-25_16-9-52.png +(image/png)
+ +image2019-3-25_16-9-41.png +(image/png)
+ +networkSummary.txt (text/plain)
+ +image2019-3-25_16-8-30.png +(image/png)
+ +networkSummary20190308.xlsx +(application/vnd.openxmlformats-officedocument.spreadsheetml.sheet)
+ +image2019-1-7_14-32-27.png +(image/png)
+ +networkSummaryWithoutHof.xltx +(application/vnd.openxmlformats-officedocument.spreadsheetml.template)
+ +networkSummaryWithHof.xltx +(application/vnd.openxmlformats-officedocument.spreadsheetml.template)
+ +networkSummaryWithoutHof.xlsx +(application/vnd.openxmlformats-officedocument.spreadsheetml.sheet)
+ +networkSummaryWithHof.xlsx +(application/vnd.openxmlformats-officedocument.spreadsheetml.sheet)
+ +networkSummaryWithoutHof.txt +(text/plain)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/CSDP_Tutorial/index.html b/gis/CSDP_Tutorial/index.html new file mode 100644 index 00000000..c4109606 --- /dev/null +++ b/gis/CSDP_Tutorial/index.html @@ -0,0 +1,543 @@ + + + + + + + + + + + + + + + + + + CSDP Tutorial - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

CSDP Tutorial

+

Brad Tom (developer of CSDP) gave a +presentation on CSDP in +February 2009. A recording of this presentation is available below.

+

+

Updates

+

In version 8.x the irregular xsection file format has changed. To change +this information to the new format run the script under +vista/scripts/dsm2/csdp_geom_converter.py with the location of the +directory as input

+
vscript scripts/dsm2/csdp_geom_converter.py <dir_containing_csdp_calculated_xsections>
+
+
+
+This will create an irregular_xsections_dsm2.inp file which will contain all the cross sections in that directory in the new 8.x format
+
+ +

Attachments:

+

+csdpWebexClass.pdf +(application/pdf)
+ +csdpWebexClass.ppt +(application/vnd.ms-powerpoint)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/Creating_DSM2_v8.2_GIS_grid_map/index.html b/gis/Creating_DSM2_v8.2_GIS_grid_map/index.html new file mode 100644 index 00000000..390e29b3 --- /dev/null +++ b/gis/Creating_DSM2_v8.2_GIS_grid_map/index.html @@ -0,0 +1,611 @@ + + + + + + + + + + + + + + + + + + Creating DSM2 v8.2 GIS grid map - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Creating DSM2 v8.2 GIS grid map

+

Creating shapefiles

+

The existing CSDP network file for the DSM2 8.2 network is +incomplete and contains errors. The network file for DSM2 v8.3 is +accurate, but contains some additional channels and nodes that were +moved. The easiest way to create shapefiles for the DSM2 v8.2 grid is to +modify the network and landmark (nodes) for the 8.3 grid.

+

Verification

+

The goal is to display the pdf gridmap as a background image in ArcGIS +to verify that all the channel and node numbers are correct and in the +correct locations.

+
    +
  1. The following command (using ghostscript in Cygwin) creates a tif + file from the dsm2 pdf grid map file:
    + gs -q -dNOPAUSE -sDEVICE=tiffg4 -sOutputFile=gridmappdf.tif + "DSM2_Grid2.0 (1).pdf" -c quit
  2. +
  3. Create a copy of the tif file, with "marsh" in the filename. This is + because the pdf gridmap has the Suisun Marsh disconnected from the + delta and printed in a different scale.
  4. +
  5. Next step is to identify a few landmarks that are easily + identifiable on both the pdf grid map and on the basemap in ArcGIS. + I chose 3 points: one in the north delta, near the confluence, and + in the south delta. 
  6. +
  7. In QGIS, select Plugins-Manage and Install Plugins:
    +
  8. +
  9. Search for "GDAL", check the box "Georeferencer GDAL", then click + close: 
    +
  10. +
  11. Select Raster-Georeferencer: 
    +
  12. +
  13. Click the Open Raster + button '
  14. +
  15. Select the pdf file.
  16. +
  17. Select Settings-Transformation + Settings
  18. +
  19. Use the following settings, including an output + filename: 
  20. +
  21. Click on a point in the map, and enter UTM coordinates, then click + OK: 
  22. +
  23. When you have specified coordinates for all your points, click the + start georeferencing button. A tif file will be created, which you + can load into ArcGIS.
  24. +
  25. In ArcGIS, adjust the layer + transparency. 
  26. +
  27. Now you can easily compare the pdf gridmap to the GIS data.
  28. +
+

Attachments:

+

+image2020-5-12_9-25-2.png +(image/png)
+ +image2020-5-12_7-18-51.png +(image/png)
+ +image2020-5-12_7-17-47.png +(image/png)
+ +image2020-5-12_7-17-15.png +(image/png)
+ +image2020-5-12_7-16-53.png +(image/png)
+ +image2020-5-12_7-15-45.png +(image/png)
+ +image2020-5-12_7-14-51.png +(image/png)
+ +image2020-5-12_7-14-8.png +(image/png)
+ +image2020-5-12_7-12-43.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/Cross-Section_Development_Program_CSDP_/index.html b/gis/Cross-Section_Development_Program_CSDP_/index.html new file mode 100644 index 00000000..9aaf22ea --- /dev/null +++ b/gis/Cross-Section_Development_Program_CSDP_/index.html @@ -0,0 +1,584 @@ + + + + + + + + + + + + + + + + + + Cross-Section Development Program (CSDP) - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Cross-Section Development Program (CSDP)

+

Introduction

+

Bathymetry data is used by CSDP to draw cross-sections which are then +converted to DSM2-Hydro cross sectional input. Furthermore CSDP provides +the channel and cross section locations in GIS projection of NAD 27, +NGVD 29

+

CSDP was developed by Brad Tom in the 1990s, and has recently been +updated for use in the DSM2 GIS Reference Project.

+

How to get started in using CSDP?

+

The CSDP Manual is available here Cross-Section Development Program +(CSDP)

+

A hands on tutorial and presentation given by Brad Tom +in 2009 is a good reference resource.

+

In version 8.x the irregular xsection file format has changed. To change +this information to the new format run the script under +vista/scripts/dsm2/csdp_geom_converter.py with the location of the +directory as input

+
vscript scripts/dsm2/csdp_geom_converter.py <dir_containing_csdp_calculated_xsections>
+
+

This will create an irregular_xsections_dsm2.inp file which will contain all +the cross sections in that directory in the new 8.x format

+

CSDP will now create DSM2 geometry input in both the original multi-file +format used by older versions of DSM2, and the newer single file format, +so the above script is no longer needed.

+

An ArcGIS extension was developed as a modern replacement for CSDP by +Tom Heinzer. However this has not been available publicly yet and the +grid and cross sections are still being developed in this tool.

+

CSDP conversion to ArcGIS

+

Using WKT (Well Known Text) format and QGIS (add delimited text layer) +capabilities, the information from CSDP files was converted to +shapefiles

+

node.cdl contained the information about the nodes in CSDP corresponding +to DSM2 node locations. 

+

mjtstrm_vec.cdo contained the outlines of levees and other features +which are now redundant given the availability of maps (raster based +tile layers) from google, open street etc.

+

05jul2000.cdn is the channel network which included the centerline of +channels and the cross-section created by looking at bathymetry data +(those are available separately as large files)

+

delta_2009Calib.cdn is the channel network for presumably the 2009 +calibration.

+

The files are available on the shared drive +(\cnrastore-bdo\Delta_Mod\Share\maps) from both 2000 +(CSDP_Converted_2000Calib.qgs) and 2009 (CSDP_Converted_2009Calib.qgs) +calibrations

+

For more information on DSM2 gridmaps and how they relate to CSDP files, +see DSM2 Geo referenced grid.

+
    +
  • Write up needed on CSDP and its successor the ArcGIS extension 
  • +
  • Write up needed using approach by Ines using ArcGIS and python + scripts 
  • +
+

Attachments:

+

+CSDP_vs_Channels_inp_Lengths.xlsx +(application/vnd.openxmlformats-officedocument.spreadsheetml.sheet)
+ +Clifton_court_2011.png +(image/png)
+ +Clifton_court_1990.png +(image/png)
+ +RSAC092_2011.png (image/png)
+ +RSAC092_1990.png (image/png)
+ +RSAN018_2011.png (image/png)
+ +RSAN018_1990.png (image/png)
+ +RSAN007_2011.png (image/png)
+ +RSAN007_1990.png (image/png)
+ +csdpmanual.pdf (application/pdf)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/DSM2_Geo_referenced_grid/index.html b/gis/DSM2_Geo_referenced_grid/index.html new file mode 100644 index 00000000..a876e678 --- /dev/null +++ b/gis/DSM2_Geo_referenced_grid/index.html @@ -0,0 +1,716 @@ + + + + + + + + + + + + + + + + + + DSM2 Geo referenced grid - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 Geo referenced grid

+

Background

+

DSM2 input specifies geographically based information such as channel +lengths and cross section distances from the upstream node. However the +geographically referenced node locations or channel outlines are not +directly needed for hydrodynamic calculations. 

+

In addition to this the cross sections in DSM2 are based on bathymetry +data that is used to generate elevation to cross-sectional property +information. 

+

Even though this information is not needed directly it is very important +to keep the geographically referenced information in sync with the input +parameters such as channel length and cross section locations in DSM2 +input

+

There have been different areas for which the grid was developed over +time, the Sacramento-San Joaquin Delta, the San Joaquin River Extension +and the Aqueduct grid. The one that is most commonly referred to as DSM2 +grid is the Sacramento-San Joaquin Delta

+

Sacramento - San Joaquin Delta grid

+

The original DSM2 grid was based on hand calculated distances based on +USGS Quad maps of the Delta (circa 1990). These were done on paper maps +and the original information has been lost though.

+

A pdf version of this grid +based on presumably this information is often found in circulation. The +grid map contained in this pdf was originally created using AutoCAD. +However the node locations in this pdf version are clearly not in the +stream at many times and certainly not geo-referenced to any projection +system. Nodes and channels were not always placed very carefully, +presumably because the map was primarily used to identify approximate +locations of nodes, channels, and stations, and channel connectivity.

+

In the late 1990s or early 2000s, a paper copy of the grid was placed on +a digitizing tablet, and nodes were digitized by clicking on each one +(Amy Little might have done this). The result was a file containing +approximate UTM coordinates of each node. This file was used by the DSM2 +Cross-Section Development Program (CSDP) to create representations of +DSM2 channels and cross-sections. 

+

CSDP was developed by Brad Tom and Nicky Sandhu based +upon specifications written by Ralph Finch in 1998 to derive the cross +sections from bathymetry data, which at the time were mostly single beam +soundings of depth that were available over many decades in the Delta. +This tool is the basis of the current grid in 2000 and the recalibrated +grid in 2009. As a by product of this effort the node locations and +channel outlines were stored in UTM coordinates. Originally, these were +not used directly to derive channel lengths, but they were used +indirectly in determining the distance of a cross-section from the +upstream node of a channel. The DSM2 GIS Reference project, which began +in 2018, will use CSDP centerlines to determine channel lengths.

+

CSDP was developed pre-ArcGIS, and with ArcGIS now +being fairly standard in DWR there is a need to provide this +geographical information in ArcGIS format. In recent years, Jane +Schafer-Kramer created an ArcGIS version of this +grid. Jane developed, under Ralph Finch's guidance, an ArcGIS +referenced grid by manually putting nodes at the closest location based +on the pdf version of the grid. Again the channel lengths from these +would not match either CSDP or the original grid as it is an independent +manual effort. Furthermore there would be a mismatch to the location of +the cross-section positions. 

+

In 2012, Tom Heinzer was contracted to develop an ArcGIS based extension +to allow a user to develop cross-sections from DEM which in turn is +based on interpolations of depth sounding data. This again is a work in +progress and cannot import the current cross-sectional profiles +available in CSDP.

+

In 2017, CSDP grid data for the 2009 calibration was imported into +ArcGIS and along with it the +channel outlines and node locations. The channel outlines in ArcGIS were +used to calculate lengths for the channels and these were then compared +to the current grid.  There were many mismatches discovered and these +should be addressed in future efforts

+

2009 Grid

+

The 2009 Grid is used for DSM2 v8.2. It is similar to the pdf gridmap, +but it includes some upper Sacramento River changes. 

+

The node locations and the associated channel network lengths do have a +match with the 2000 calibration files (spot checked). However, the 2009 +CH2MHill mini calibration adjusted node +positions, channel lengths, and cross-sections for channels 412-418.  +The changes made in these channels were incorporated into DSM2, and are +included in DSM2 versions as recent as v8.2.0, which is the current +release as of 10/2019. However, we did not get any CSDP or GIS data from +CH2MHill.  Node locations were reverse engineered using the mini +calibration lengths, starting with the common node position from channel +412. The overall sum of the length (reach 412-418) was unchanged and +this assumption allows for a reasonable reverse engineering effort. 

+

This reverse engineered effort is available on the shared drive as +shapefiles \cnrastore-bdo\Delta_Mod\Share\maps\csdp_2009_calib_converted\CSDP_Channels_Adjusted_MiniCalib.shp +(channels) +and \cnrastore-bdo\Delta_Mod\Share\maps\csdp_2009_calib_converted\CSDP_Nodes_Adjusted_MiniCalib.shp. The +Nodes shapefile is missing some files, and cannot be loaded into ArcGIS. +It was loaded into OpenJUMP, and exported +to \cnrastore-bdo\Delta_Mod\Share\maps\csdp_2009_calib_converted\CSDP_Nodes_Adjusted_MiniCalib_Recovered.shp. +These files are also available +in \nasbdo\Modeling_Data\DSM2_GIS_CSDP_gridmaps\GISGridmapV8.2.

+

These layers are the closest approximation to the grid used for DSM2 +v8.2. 

+

There is a large discrepancy in the channel length for channel 422 +between cross channel and northern head of Georgiana slough. CSDP and +ArcGIS calculations put it at 3300 feet while in DSM2 input files it is +5300 feet. This is not an isolated incident; there are many others, as +documented in this CSDP_vs_Channels_inp_Lengths.xlsx

+

DSM2 v8.1 and v8.2 grid

+

For version 8.1 and 8.2, use this grid for referencing DSM2 elements +approximately. The channels and nodes layers are incomplete, not very +accurate, and contain errors.

+

Shapefiles are available in +\nasbdo\Modeling_Data\DSM2_GIS_CSDP_gridmaps\GISGridmapV8.2\

+

DSM2 v8.3 grid

+

The 2019 grid is used for DSM2 v8.3, which is under development, and +will be the result of the DSM2 GIS Reference Project.

+

Three shapefiles (located here: +\nasbdo\Modeling_Data\DSM2_GIS_CSDP_gridmaps\GISGridmapV8.3) each have +been created from the CSDP network (channel centerlines) and landmark +(nodes) data for both the 2009 calibration (DSM2 V8.2) and the 2019 +calibration (DSM2 V8.3). The shapefiles were created by exporting +network and landmark data from the CSDP to WKT files and importing the +results into QGIS, then saving to shapefiles. This is intended to be a +first step toward creating a +georeferenced grid map. Shapefiles are available in +\nasbdo\Modeling_Data\DSM2_GIS_CSDP_gridmaps\GISGridmapV8.3\

+
    +
  1. dsm2_channels_centerlines contains the channel centerlines as + created in the CSDP. Many channels have endpoints that are
    + not located at the node; this was done to improve the accuracy of + the DSM2 channel volume. Also, many centerlines do not follow the + actual channel centerline perfectly.
  2. +
  3. dsm2_channels_straightlines contains straight lines connecting + the two endpoints of each CSDP centerline.
  4. +
  5. dsm2_nodes contains the CSDP landmark data. The node locations + were previously not very accurate; they have now been corrected.
  6. +
  7. dsm2_boundary_flow_nodes contains points placed at the locations + of nodes where boundary flows are applied.
  8. +
  9. dsm2_boundary_stage_node contains a point placed at the location + of the node where the boundary stage is applied
  10. +
  11. dsm2_gates contains points placed at the approximate location of + the channel centerline near each gate. In DSM2, gates are located at + the ends of channels. The points in this layer are intended to + represent the approximate location in DSM2, and not necessarily the + physical location of the gate.
  12. +
+

Future Directions

+

We need a georeferenced gridmap. It should have the following features:

+
    +
  1. Display nodes as circles with numbers inside.
  2. +
  3. Display straightline channels with numbers, and an arrow indicating + positive flow direction.
  4. +
  5. Display channels derived from CSDP centerlines, with numbers, and an + arrow indicating positive flow direction. 
  6. +
  7. straightline channels and CSDP centerline channels should be + different colors.
  8. +
  9. Useful for printing on a plotter.
  10. +
  11. Easy to modify when CSDP node locations or channels change. 
  12. +
  13. +

    Good contrast with background, so we can easily determine + connectivity and read all the numbers. 

    +
  14. +
  15. +

    Michael Mehrdadi is working on an ArcGIS gridmap using the + shapefiles for the 2019 grid. 

    +
  16. +
  17. Hans Kim is working on a Google Earth gridmap. This will likely be a + useful training tool, and may have other uses. 
      +
    • The current version (as of 10/31/2019) of the gridmap is found + here: DSM2_Grid_191029.kml. + It can be opened with Google Earth or imported into Google Map.
    • +
    • Updates will be made as new shapefiles become available.
    • +
    +
  18. +
+

Attachments:

+

+DSM2_Grid_191029.kml +(application/octet-stream)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/DSM2_Sacramento_San-Joaquin_Delta_Grid/index.html b/gis/DSM2_Sacramento_San-Joaquin_Delta_Grid/index.html new file mode 100644 index 00000000..48e045c6 --- /dev/null +++ b/gis/DSM2_Sacramento_San-Joaquin_Delta_Grid/index.html @@ -0,0 +1,542 @@ + + + + + + + + + + + + + + + + + + DSM2 Sacramento San-Joaquin Delta Grid - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 Sacramento San-Joaquin Delta Grid

+

Introduction

+

The main area of application for DSM2 is the Sacramento San-Joaquin +Delta. This grid was developed over the years. A commonly used version +is available as a PDF. +Vamsi Sridharan made a stitched version with Suisun bay from the same +PDF available here.

+

ArcGIS version

+

In recent years, Jane Schafer-Kramer created an +ArcGIS version of this +grid. This map representation will be refined and available with the +ArcGIS X-section editing tool that is now in beta testing.

+

To view channels colored by mannings or dispersion, channels.inp was +imported (from DSM2 v 8.1.2) as a table. This was then joined with "DSM2 +Channels" table in ArcGIS on the channel number field. Then symbology +can be used to display mannings attribute in the joined table. The +product is available here \nasbdo\Delta_Mod\Share\maps\Delta Stations +with DSM2 Grid Mannings N Colored.mpk

+

Attachments:

+

+DSM2_Grid2.0_updated.pdf +(application/pdf)
+ +Delta_Stations_with_DSM2_Grid.mpk +(application/octet-stream)
+ +DSM2_Grid2.0.pdf +(application/pdf)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/Exporting_CSDP_Information_into_GIS/index.html b/gis/Exporting_CSDP_Information_into_GIS/index.html new file mode 100644 index 00000000..b0e1718b --- /dev/null +++ b/gis/Exporting_CSDP_Information_into_GIS/index.html @@ -0,0 +1,515 @@ + + + + + + + + + + + + + + + + + + Exporting CSDP Information into GIS - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Exporting CSDP Information into GIS

+

Background

+

CSDP contains channel outline, cross-section locations, and +cross-section profile as well as node locations. These are referenced in +NAVD88 vertical datum and NAD83 horizontal datum in UTM Zone 10N +projection.

+

Methods

+

QGIS is a tool that can import WKT +(https://en.wikipedia.org/wiki/Well-known_text) format +into a text based layer that can then be exported to ArcGIS. 

+

There is Java code available for 

+
    +
  1. Exporting CSDP channel outlines to WKT. https://github.com/CADWRDeltaModeling/dsm2-vista/blob/master/dsm2-input-model/src/gov/ca/dsm2/input/csdp/CSDPChannelNetworkToWKT.java
  2. +
  3. +

    Exporting CSDP node locations to WKT. https://github.com/CADWRDeltaModeling/dsm2-vista/blob/master/dsm2-input-model/src/gov/ca/dsm2/input/csdp/CSDPNodeCDLToWKT.java

    +
  4. +
  5. +

    Nicky SandhuNeed to make these standalone generic utilities to be + run from command line

    +
  6. +
  7. Brad Tomcan you take a look at this code and see if it can be + integrated into CSDP easily
  8. +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/Exporting_Channel_Lengths_from_CSDP_Network_file/index.html b/gis/Exporting_Channel_Lengths_from_CSDP_Network_file/index.html new file mode 100644 index 00000000..171f6adc --- /dev/null +++ b/gis/Exporting_Channel_Lengths_from_CSDP_Network_file/index.html @@ -0,0 +1,474 @@ + + + + + + + + + + + + + + + + + + Exporting Channel Lengths from CSDP Network file - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Exporting Channel Lengths from CSDP Network file

+

CSDP currently has the ability to output just the channel ids and +lengths.

+
    +
  1. Use the Network | Export Options menu item to select only channel + lengths output in station elevation format. 
  2. +
  3. Then use Network | Save As menu to save to file which should only + then have channel id and length in output.
  4. +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/Extracting_Bathymetry_Data_From_An_Irregularly_Shaped_Region/index.html b/gis/Extracting_Bathymetry_Data_From_An_Irregularly_Shaped_Region/index.html new file mode 100644 index 00000000..a3818105 --- /dev/null +++ b/gis/Extracting_Bathymetry_Data_From_An_Irregularly_Shaped_Region/index.html @@ -0,0 +1,515 @@ + + + + + + + + + + + + + + + + + + Extracting Bathymetry Data From An Irregularly Shaped Region - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Extracting Bathymetry Data From An Irregularly Shaped Region

+

Using the CSDP, create a new centerline using the Centerline-Create +menu item.

+

The name of the centerline does not matter.

+

Add points to the centerline until it outlines the data you want to +extract. See example below. The endpoints do not need to be in the same +place. A polygon will be created whose vertices are all of the +centerline points, so the first and last points will be connected. Save +the network file.

+

I used a simple Java program called +ExtractShipChannelLeveesFromYoloBypassDEM, which uses hard-coded +filenames for both the input (network file and the bathymetry file) and +the output (bathymetry file). Eventually this code will be added to the +Bathymetry menu in the CSDP, which will export the data surrounded +by a polygon created from the selected centerline to a specified +filename.

+

+

Attachments:

+

+image2018-12-3_13-53-22.png +(image/png)
+ +image2018-12-3_13-53-12.png +(image/png)
+ +image2018-12-3_13-52-46.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/Importing_Digital_Elevation_Maps_DEMs_into_CSDP/index.html b/gis/Importing_Digital_Elevation_Maps_DEMs_into_CSDP/index.html new file mode 100644 index 00000000..1f61fb27 --- /dev/null +++ b/gis/Importing_Digital_Elevation_Maps_DEMs_into_CSDP/index.html @@ -0,0 +1,587 @@ + + + + + + + + + + + + + + + + + + Importing Digital Elevation Maps (DEMs) into CSDP - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Importing Digital Elevation Maps (DEMs) into CSDP

+

Background

+

CSDP was developed in the late 1990s and can only consume point features +in its custom format. This document outlines the process of converting +modern DEMs in raster form into files that CSDP can use to bring in the +latest bathymetry information that is developed in modern tools such as +ArcGIS.

+

CSDP File Format

+

CSDP supports bathymetry data as point features in XYZ format along with +columns for year and source of data. In addition, CSDP allows for +metadata defining the projection system (it only supports two: UTM NAD83 +and NAD27).

+

Below is a sample header from a CSDP bathymetry file

+
;HorizontalDatum:  UTMNAD83
+;HorizontalZone:   10
+;HorizontalUnits:  Meters
+;VerticalDatum:    NAVD88
+;VerticalUnits:    USSurveyFeet
+;Filetype: bathmetry
+;NumElements: 1544252
+563970.000000000 4234180.000000000 112.7323 2012 SF_DEM
+563990.000000000 4234180.000000000 117.6413 2012 SF_DEM
+
+

Steps

+
    +
  1. +

    Use Arc Toolbox > Conversion Tools > From Raster > Raster to + ASCII to output DEM as text file. 

    +

    For large dems, click on the Environments in the dialog box in step +1 and make sure the "Output Coordinates" are in NAD83, zone 10, in +meters in UTM projection, and the vertical datum should be NAVD88 in +meters. and that the "Processing Extent" is "Same as Display". Zoom +in to the relevant portion before running the tool in step1 and that +should limit the DEM output to just the viewable area.

    +
  2. +
  3. +

    Use this program: ASCIIGridToCSDPConverter \<raster ascii + filename> \<prn output filename>

    +
  4. +
  5. +

    Open \<prn output filename> in CSDP

    +
  6. +
  7. +

    You can also use the CSDP:

    +

    + + + + + + +
    +

    Select Bathymetry-Import Bathymetry from ASCII Raster

    +

    +
    +

    Fill in the dialog. If dataset is more dense than you need, you can +enter a factor greater than 1

    +

    +

    +
  8. +
+

Attachments:

+

+image2019-6-14_11-20-44.png +(image/png)
+ +image2019-6-14_11-20-28.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/Merging_multiple_versions_of_network_files/index.html b/gis/Merging_multiple_versions_of_network_files/index.html new file mode 100644 index 00000000..bb4dd955 --- /dev/null +++ b/gis/Merging_multiple_versions_of_network_files/index.html @@ -0,0 +1,556 @@ + + + + + + + + + + + + + + + + + + Merging multiple versions of network files - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Merging multiple versions of network files

+

One way to merge changes from multiple users is to use a file comparison +tool such as WinMerge. However, if the users did not start with +identical versions of the network file, this won't work.

+

I have added a feature to the CSDP (Network-Save Specified Channels), +which helps merge changes from network files submitted by multiple users +who did not start with the same version of the network file.

+

Here's how I use it to merge changes into an existing network file:

+
    +
  1. Get a list of modified centerlines for the new network file.
  2. +
  3. Enter the list into Excel. 
  4. +
  5. Copy the list, and paste-special-transpose.
  6. +
  7. In the CSDP, load the existing network file, which you might refer + to as the current master version. 
      +
    1. Select Network-Save Specified Channels.
    2. +
    3. In the file selector dialog, enter a filename for the new master + version. 
    4. +
    5. Go back to Excel, and copy the transposed list of centerline + numbers. Paste them into the dialog that appears (below) in the + Channel Numbers field. (The list will be tab delimited, which is + fine). You may not be able to see all the centerline names in + the text field (I'll have to work on that), but it will work.
    6. +
    7. Before clicking OK, click the "Don't export specified channels" + checkbox. Make sure this option is selected before you click + OK. When you click OK, CSDP will create a new network file + containing all centerlines EXCEPT the ones you specified.
      +
    8. +
    +
  8. +
  9. Now load the new network file.
      +
    1. Select Network-Save Specified Channels.
    2. +
    3. In the file selector dialog, enter a filename for a temporary + network file.
    4. +
    5. Go back to Excel, and copy the transposed list of centerline + numbers. Paste them into the dialog that appears (below) in the + Channel Numbers field. (The list will be tab delimited, which is + fine). You may not be able to see all the centerline names in + the text field (I'll have to work on that), but it will + work.
    6. +
    7. Before clicking OK, make sure the "Don't export specified + channels" checkbox is NOT selected. When you click OK, CSDP + will create a new network file containing only the centerlines + you specified. 
    8. +
    +
  10. +
  11. Use a text editor to copy the contents (excluding the headers at the + top) of the new temporary network file into the new master network + file. Update the numElements field in the header of the new file. + The value should be the sum of the values from the two files.
  12. +
+

Attachments:

+

+image2018-12-21_12-8-47.png +(image/png)
+ +image2018-12-21_12-0-36.png +(image/png)
+ +image2018-12-21_11-49-11.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/Schematics_and_Boundaries/index.html b/gis/Schematics_and_Boundaries/index.html new file mode 100644 index 00000000..e8552eac --- /dev/null +++ b/gis/Schematics_and_Boundaries/index.html @@ -0,0 +1,722 @@ + + + + + + + + + + + + + + + + + + Schematics and Boundaries - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Schematics and Boundaries

+

CalSIM II schematic is retrieved from BDO Central Valley modeling +website

+

http://baydeltaoffice.water.ca.gov/modeling/hydrology/CalSim/Downloads/CalSimDownloads/BST_CALSIMII_schematic_040110.pdf

+

+

DSM2 (v812) schematic is retrieved from DSM2 release package +dsm2\documentation

+

+

DSM2 inputs are retrieved from CalSIM output

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+
DSM2 name
+
DSM2 nodeCalSIM IICalSIM 3

BOUNDARY_FLOW

+


+

+


+

calaveras

FLOW-CHANNEL

+


+

21C508_R514C_CLV004
cosumnes446C501C_CSM005
yolo316C157C_CSL005
sacFLOW330C169_D168B_D168CC_SAC041
vernalis17C639_R644C_SJR070
moke

FLOW-INFLOW

447I504

C_MOK022

+

SR_60N_MOK019

+

SR_26S_MOK014


+

+

+

+

+

+

SOURCE_FLOW

AntiochWW

FLOW-DELIVERY

+            
 46D406B
+
COSMA133D514A D_SJR028_WTPDWS
COSMA233D514B
+
FRWP_EBMUD332D168B
+
FRWP_SCWA332D168C
+
northbay273D403B C_CSL004B
nb_fvb273D403D
+
nb_sol273D403C
+
ccc206D408_RSD408
CCWDVC191D408_VC
+
oldr03480D408_OR
+
vallejo320D403A
+
cvp
+
 181D418_TD_ADJ D_OMR028_DMC000
SOURCE_FLOW_RESERVOIRswpFLOW-DELIVERYclifton_courtD419_TD_ADJ D_OMR027_CAA000

+

+

+

+

+

+
NODE_CONCENTRATIONvernalisSALINITY-EC17VERNWQFINALVERNWQFINAL
+ +

Attachments:

+

+DSM2_Grid2.0.pdf +(application/pdf)
+ +BST_CALSIMII_schematic_040110.pdf +(application/pdf)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/gis/Symbology/index.html b/gis/Symbology/index.html new file mode 100644 index 00000000..8f6843d9 --- /dev/null +++ b/gis/Symbology/index.html @@ -0,0 +1,515 @@ + + + + + + + + + + + + + + + + + + Symbology - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Symbology

+

The following symbology is needed:

+
    +
  1. Arrows on or next to straight line channels and centerline channels.
  2. +
  3. Nodes displayed as circles with numbers in the middle.
      +
    1. In ArcGIS Pro, change the symbol to a circle (double click on + the symbol in the contents pane) and set the circle size to 20 + pt, and the font to Tahoma 8. Select Enable scale-based sizing.
    2. +
    +
  4. +
  5. Gates displayed using symbol similar to that used in the pdf grid + map. Using Meteorology-Fog, Light.
  6. +
+

Channel Arrows

+

The direction of arrows created using symbology is determined by the +order in which the points are defined in each line. Since the channel +lines (both straight and centerlines) are derived from CSDP data, the +points should always be in order from upstream to downstream.

+

To modify the symbol:

+
    +
  1. double-click on the symbol below the layer name (see image + below).
  2. +
  3. In the Symbology Panel, under Gallery, select "Arrow Right + Middle". 
  4. +
  5. Then click on Properties, set the color, and set line width to 15 pt + (or whatever you want), and select "Enable scale-based sizing".
  6. +
+

Attachments:

+

+image2020-5-14_15-15-14.png +(image/png)
+ +image2020-5-14_15-14-8.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/images/dsm2-icon_32-24.png b/images/dsm2-icon_32-24.png new file mode 100644 index 00000000..03f680e4 Binary files /dev/null and b/images/dsm2-icon_32-24.png differ diff --git a/images/icons/bullet_blue.gif b/images/icons/bullet_blue.gif new file mode 100644 index 00000000..25bfa0cf Binary files /dev/null and b/images/icons/bullet_blue.gif differ diff --git a/images/icons/contenttypes/home_page_16.png b/images/icons/contenttypes/home_page_16.png new file mode 100644 index 00000000..32888f65 Binary files /dev/null and b/images/icons/contenttypes/home_page_16.png differ diff --git a/images/icons/emoticons/smile.svg b/images/icons/emoticons/smile.svg new file mode 100644 index 00000000..b6d9f436 --- /dev/null +++ b/images/icons/emoticons/smile.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/images/icons/wait.gif b/images/icons/wait.gif new file mode 100644 index 00000000..085ccaec Binary files /dev/null and b/images/icons/wait.gif differ diff --git a/index.html b/index.html new file mode 100644 index 00000000..89221b7e --- /dev/null +++ b/index.html @@ -0,0 +1,551 @@ + + + + + + + + + + + + + + + + + + + + DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2

+

Overview:

+

The Delta Simulation Model II (DSM2) is a one-dimensional mathematical +model for dynamic simulation of one-dimensional hydrodynamics, water +quality and particle tracking in a network of riverine or estuarine +channels. DSM2 can calculate stages, flows, velocities, mass transport +processes for conservative and non-conservative constituents including +salts, water temperature, dissolved oxygen, and trihalomethane formation +potential, and transport of individual particles. DSM2 thus provides a +powerful simulation package for analysis of complex hydrodynamic, water +quality, and ecological conditions in riverine and estuarine systems.

+

DSM2 currently consists of three modules, all of which come with the +current distribution: HYDRO, QUAL, and PTM. HYDRO simulates +one-dimensional hydrodynamics including flows, velocities, depth, and +water surface elevations. HYDRO provides the flow input for QUAL and +PTM. QUAL simulates one-dimensional fate and transport of conservative +and non-conservative water quality constituents given a flow field +simulated by HYDRO. PTM simulates pseudo 3-D transport of neutrally +buoyant particles based on the flow field simulated by HYDRO. PTM has +multiple applications ranging from visualization of flow patterns to +simulation of discrete organisms such as fish eggs and larvae.

+

DSM2 is currently in version 8.2.2. Please send comments to Min Yu +minyu@water.ca.gov.

+

For more information

+

DSM2 Learning Series
+DSM2 source code GitHub repository

+
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/installation/index.html b/installation/index.html new file mode 100644 index 00000000..b03448f1 --- /dev/null +++ b/installation/index.html @@ -0,0 +1,481 @@ + + + + + + + + + + + + + + + + + + + + + + Installation - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Installation

+

How to install DSM2 for Windows

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/index.html b/manual/index.html new file mode 100644 index 00000000..0f71ed0d --- /dev/null +++ b/manual/index.html @@ -0,0 +1,492 @@ + + + + + + + + + + + + + + + + + + + + + + Manual (archive) - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + + + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Boundary_Flow/index.html b/manual/reference/Boundary_Flow/index.html new file mode 100644 index 00000000..c5193d8c --- /dev/null +++ b/manual/reference/Boundary_Flow/index.html @@ -0,0 +1,652 @@ + + + + + + + + + + + + + + + + + + Boundary Flow - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Boundary Flow

+

Overview:

+

Boundary Flows are boundary conditions at nodes where flow is known. +This view defines these time series and assigns time series data to +them.

+

Tables:

+

Example

+
# Description:
+# Historical boundary flows to Delta
+BOUNDARY_FLOW
+NAME      NODE SIGN FILLIN   FILE          PATH                                                     
+calaveras   21    1   last   ${BNDRYINPUT} /FILL+CHAN/RCAL009/FLOW//1DAY/${HISTFLOWVERSION}/         
+cosumnes   446    1   last   ${BNDRYINPUT} /FILL+CHAN/RCSM075/FLOW//1DAY/${HISTFLOWVERSION}/         
+moke       447    1   last   ${BNDRYINPUT} /FILL+CHAN/RMKL070/FLOW//1DAY/${HISTFLOWVERSION}/         
+north_bay  273   -1   last   ${BNDRYINPUT} /FILL+CHAN/SLBAR002/FLOW-EXPORT//1DAY/${HISTFLOWVERSION}/ 
+sac        330    1   last   ${BNDRYINPUT} /FILL+CHAN/RSAC155/FLOW//1DAY/${HISTFLOWVERSION}/         
+vernalis    17    1   last   ${BNDRYINPUT} /FILL+CHAN/RSAN112/FLOW//1DAY/${HISTFLOWVERSION}/         
+yolo       316    1   last   ${BNDRYINPUT} /FILL+CHAN/BYOLO040/FLOW//1DAY/${HISTFLOWVERSION}/        
+END
+
+

BOUNDARY_FLOW

+

The Boundary Flow table defines flow boundary conditions by giving them +names and associating them with a node. The table also assigns a time +series to the boundary condition. Boundary Flow is a top-level layered +table.

+

Field Descriptions

+
NAME
+

Name assigned to the source. This is the identifier of the boundary and +is referred to elsewhere in the input system. If you assign water +quality you will use the same name in order to match concentration to +flow.

+
NODE
+

Node number at which the source is applied.

+
SIGN
+

Forces the time series to be a source or a sink. Positive values are +normally associated with a source, but the data (especially sinks such +as agricultural diversions) are sometimes measured in absolute flow. Use +1 to force the value to be a positive source or -1 to interpret values +as a sink.

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model. See fillin types

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if you +would like to assign a constant value to the input (the value will be +entered in the next column).

+
PATH
+

The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +(e.g. 4.22) here.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

HYDRO_TIME_SERIES

+
+
    +
  • Multiple sources and sinks can be assigned to a node. They are + usually kept separate in order to assign different concentrations to + them.
  • +
  • HYDRO is able to accept sources and sinks at boundary nodes, but + this is not good modeling practice. Use them on the interior.
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Boundary_Stage/index.html b/manual/reference/Boundary_Stage/index.html new file mode 100644 index 00000000..47876f0b --- /dev/null +++ b/manual/reference/Boundary_Stage/index.html @@ -0,0 +1,628 @@ + + + + + + + + + + + + + + + + + + Boundary Stage - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Boundary Stage

+

Overview

+

Stage Boundaries are locations where water levels are known. They are +often used to represent tidal boundary of an estuary. This view defines +the tidal boundary and assigns a time series to water levels at that +boundary

+

Tables

+

Example

+
# Description:
+# Historical stage at Martinez
+BOUNDARY_STAGE
+NAME  NODE  FILLIN  FILE           PATH                                                
+mtz   361   linear  ${BNDRYINPUT}  /FILL+CHAN/RSAC054/STAGE//15MIN/${HISTSTAGEVERSION}_NAVD/ 
+END
+
+

Stage Boundary Table

+

The Stage Boundary table defines the stage boundary by giving it a name +and associating it with a node. The table also assigns a time series to +the boundary. Stage Boundary is a top-level layered table.

+

Field Descriptions

+
NAME
+

Name assigned to the source. This is the identifier of the boundary and +is referred to elsewhere in the input system. If you assign water +quality you will use the same name in order to match concentration to +flow.

+
NODE
+

Node number at which the source is applied.

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model. See fillin types

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if you +would like to assign a constant value to the input (the value will be +entered in the next column).

+
PATH
+

The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +(e.g. 4.22) here.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

HYDRO_TIME_SERIES

+

Only one boundary (flow or stage) should be assigned at a node. HYDRO is +able to accept sources and sinks at boundary nodes, but this is not +good modeling practice.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Channel_Initial_Condition/index.html b/manual/reference/Channel_Initial_Condition/index.html new file mode 100644 index 00000000..edabc666 --- /dev/null +++ b/manual/reference/Channel_Initial_Condition/index.html @@ -0,0 +1,629 @@ + + + + + + + + + + + + + + + + + + Channel Initial Condition - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Channel Initial Condition

+

Overview:

+

HYDRO requires water surface and flow initial condition. This view +allows the user to specify default initial conditions. The default +initial condition is required. The default will be overridden if a +restart file is used.

+

Tables:

+ +

CHANNEL_IC

+

The table pairs channel locations with default initial values. +Interpolation is used between locations. Water surface (stage) and flow +must be specified at the upstream and downstream ends of the channel.

+

Field Descriptions

+
CHAN_NO
+

Channel number of channel where initial condition is to be applied.

+
DISTANCE
+

Distance along channel where initial condition is to be applied. This +may be a numerical distance or the keyword "length" to indicate the end +of the channel. If you edit an entry that says "length", you may see a +complicated coded value, which is only for internal use.

+
STAGE
+

Initial water surface elevation.

+
FLOW
+

Initial flow (cfs).

+

Table Info

+
Identifier:
+

CHAN_NO, DISTANCE

+
Parent Table:
+

Table is parent

+
Include Block:
+

INITIAL_CONDITION

+
+
    +
  • Default initial values specified in the GUI are replaced if a + restart file is used.
  • +
  • Reservoir initial surfaces should be matched to the surrounding + channels. Differences imply a flow, and if you haven't accounted for + the flow in your other initial conditions you will have a flow + imbalance or even instability on the first step.
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Channels/index.html b/manual/reference/Channels/index.html new file mode 100644 index 00000000..a41acd5c --- /dev/null +++ b/manual/reference/Channels/index.html @@ -0,0 +1,720 @@ + + + + + + + + + + + + + + + + + + Channels - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Channels

+

Overview:

+

Channels are the fundamental objects of the DSM2 grid. The Channels +table allows you to enter channel connectivity, parameters and geometry. +Channel connectivity is defined by the upstream and downstream node +numbers of the channel. Two child tables describe the locations and +geometry of user-described cross-sections in the selected channel. Note +that a default initial condition is required for every channel number in +the DSM2 grid, and this is entered separately in the Channel Initial +Conditions table.

+

Tables:

+ +

CHANNEL

+

The CHANNEL table defines the connectivity, length, friction and +dispersion characteristics of a channel.

+

Field Descriptions

+

CHAN_NO — Channel number. This is the identifier of the channel, and +corresponds to the number you typically see on a grid map. LENGTH +(ft) — Length of the channel reach. MANNING — Manning's n friction coefficient +for the whole reach. DISPERSION — Dimensional dispersion factor. UPNODE — Number +of the upstream node at which the channel is connected. DOWNNODE — Number of the +downstream node at which the channel is connected.

+

Table Info

+

Identifier: CHAN_NO; Parent Table: Table is parent; Include Block: GRID

+
+

XSECT

+

This table lists files where bathymetric cross-sections are specified by +the user using the CSDP format. The table lists the fraction of the +distance along the reach (from upstream to downstream) at which the user +cross-section is located. These cross-sections will be interpolated by +the model at computational points. Overspecification of geometry is a +frequent source of user error/misconception, please see usage +notes below. Also note that this style of input and the XSECT_LAYER +"single file" format below should not be freely mixed for a given +channel -- use one or the other.

+

Field Descriptions

+

CHAN_NO — Channel number where cross-section is located. DIST — Fraction of +distance from upstream node to downstream node where cross-section is +located. FILE — CSDP-formatted file where cross-section geometry is defined.

+

Table Info

+

Identifier: CHAN_NO, DIST; Parent Table: CHANNEL; Parent +Identifier: CHAN_NO; Include Block: GRID

+
+

XSECT_LAYER

+

The Cross-Section Layer Table lists geometric information about each +cross-section. This information is in the form of lookup tables of +hydraulically important quantities such as area, width and wetted +perimeter.

+

Field Descriptions

+

CHAN_NO — Channel number in which cross-section is located. DIST — Fraction of +distance from upstream node to downstream node where cross-section is +located. ELEV — Elevation from bottom at which properties are known. The +area, width, etc. apply to this elevation, and channel properties +between elevations are linearly interpolated. AREA — Area of channel from +bottom to cross section (sq ft). Ignored if Area disagrees with the +integral of WIDTH. WIDTH — Width of channel at top (ft). WET_PERIM — Wetted +perimeter of channel at given elevation.

+

Table Info

+

Identifier: CHAN_NO, DIST, ELEV; Parent Table: CHANNEL; Parent +Identifier: CHAN_NO; Include Block: GRID

+
+

Examples:

+

CHANNEL with XSECT_LAYER cross-section

+
# This example shows channels using cross-sections
+# In the XSECT_LAYER format. The benefit of this format
+# is that the input can all be put in one file.
+# This can be useful for archiving or echoing back input
+
+# CHANNEL SPECS
+CHANNEL
+CHAN_NO LENGTH MANNING DISPERSION UPNODE DOWNNODE
+1        15000   0.035        0.3      1        2 
+2        15000   0.035        0.3      2        3
+END
+
+# This is a child table. Its rows must "link" to a parent
+# using the parent id (CHAN_NO in this case).
+# Note that two cross-sections are defined here,
+# one in each channel, halfway downstream, with three
+# layers each. 
+XSECT_LAYER
+CHAN_NO DIST  ELEV   AREA WIDTH WET_PERIM
+1        0.5 -24.0    0.0  40.0      40.0 
+1        0.5   0.0  960.0  80.0     91.22 
+1        0.5  20.0 2640.0 160.0     133.6 
+2        0.5 -24.0    0.0  40.0      40.0 
+2        0.5   0.0  960.0  80.0     91.22 
+2        0.5  20.0 2640.0 160.0     133.6 
+END
+
+

CHANNEL with XSECT (csdp) cross-section

+
# This example shows channels using cross-sections
+# In the XSECT format. The specification is not 
+# complete -- we are really referring to 
+# Cross-Section Development Program (CSDP) files
+# which are in their own format.
+
+# CHANNEL SPECS
+CHANNEL
+CHAN_NO LENGTH MANNING DISPERSION UPNODE DOWNNODE
+1        15000   0.035        0.3      1        2 
+2        15000   0.035        0.3      2        3
+END
+
+# This is a child table. It is an alternative to the 
+# XSECT_LAYER table (the two can co-exist, but you 
+# should not mix input for a channel). The FILE column
+# points to a file that contains the real data which
+# would normally come out of the CSDP or other tool.
+XSECT
+CHAN_NO DIST     FILE
+1           0.5   1_0.50000.txt
+2           0.5   2_0.50000.txt
+END
+
+
+
    +
  • All channels must have an initial condition and at least one + cross-section.
  • +
  • Older versions of DSM2 had the notion of a "regular" cross-section + (meaning rectangular). In the current DSM2 this is just a + cross-section with two layers.
  • +
  • Users frequently overspecify cross-sections, either by specifying + more cross-sections longitudinally than the model can possibly use + or by describing cross-sections vertically in such a way as to + capture highly local features such as small constrictions, sills and + undulations. DSM2 is commonly used with spatial resolution (delta x) + of several thousand feet. You should only include features that are + well resolved by this resolution, which means changes that persist + over several miles. Even more importantly, you should avoid adjacent + cross-sections with bottoms that vary greatly in elevation because + they can cause odd behavior when cross-sections are interpolated to + computation points. The bottom layers of cross-sections should + represent the "overall" slope of the channel.
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/ENVVARS_Section/index.html b/manual/reference/ENVVARS_Section/index.html new file mode 100644 index 00000000..bd0a8415 --- /dev/null +++ b/manual/reference/ENVVARS_Section/index.html @@ -0,0 +1,645 @@ + + + + + + + + + + + + + + + + + + ENVVARS Section - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

ENVVARS Section

+

Overview:

+

ENVVARs are values used in text substitution elsewhere in the input. +DSM2 attempts to replace any text that is preceded by a "$" and wrapped +in curly braces: ${EXAMPLE}. By convention, these variables are always +used in upper case. The substitution will be made from either system +environmental variables or pseudo-environmental variables defined in +this section. For instance, the SCALAR input section might indicate that +run_start_time be set to ${START_TIME}. DSM2 will then search the system +environmental variables and user-specified environmental variables for +START_TIME and substitute the value (or print a warning if it finds +nothing).

+

ENVVARs can be specified in text, or set by manipulating the command +environmental variables. In production runs, many of the ENVVARs are set +in a special file called the "configuration" file. Such a file is often +included in the main input file using the CONFIGURATION include block.

+

Reference

+

Keyword Descriptions

+
NAME
+

Name of the envvar. This is the alias that will be used elsewhere in the +input system where the substitution is desired. For instance, if the +NAME is START_TIME, ${START_TIME} would be used elsewhere.

+
VALUE
+

Value assigned during substitution. For instance, for an ENVVAR with +name START_TIME, a likely value would be "0000".

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

CONFIGURATION

+

Examples:

+

Definition and use: The following example defines an ENVVAR section and +then uses the variables later in a SCALAR section.

+
ENVVARS        
+NAME    VALUE   
+START_DATE  01JAN1990   # Runtime using envvars
+END_DATE    01JAN2001   
+START_TIME  0000    
+END_TIME    0000    
+END
+
+
+SCALAR      
+model_name  historical_hydro    
+run_start_date  ${START_DATE}   
+run_end_date    ${END_DATE} 
+run_start_time  ${START_TIME}   
+run_end_time    ${END_TIME} 
+END
+
+

Table Info

+

NAME

+
Parent Table:
+

Table is parent

+
Include Block:
+

CONFIGURATION

+
+
    +
  • +

    ENVVARs can also be used on each other -- in text input that occurs + after the ENVVAR definition.

    +

    ENVVARS
    +NAME VALUE
    +DSM2MODIFIER historical_v81 #Study name used for DSM2 output

    +

    Output

    +

    OUTPUTFILE ${DSM2MODIFIER}.dss

    +

    hydro

    +

    HYDROOUTDSSFILE ${DSM2OUTPUTDIR}/${OUTPUTFILE} +END

    +
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Gate/index.html b/manual/reference/Gate/index.html new file mode 100644 index 00000000..6bdfd9c7 --- /dev/null +++ b/manual/reference/Gate/index.html @@ -0,0 +1,1151 @@ + + + + + + + + + + + + + + + + + + Gate - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Gate

+

Overview:

+

Gates are sites that present a barrier or control on flow. A gate may +have an arbitrary number of associated hydraulic devices (pipes and +weirs), each of which may be operated independently to control flow.

+

The Gates View is primarily for specifying the physical properties of +the gate and some simple operating modes. Gates that are operated simply +can be completely specified in this table. Much more elaborate controls +are possible using Gate Time Series and Operating Rules, and in addition +to manipulating the hydraulic devices you can completely uninstall the +gate.

+

Tables:

+ +

GATE

+

The Gate table defines the name and connectivity of the gate. Gates are +a top-level layered table.

+

Field Descriptions

+
NAME
+

Name of the gate. This is the identifier of the gate used elsewhere to +refer to the gate.

+
FROM_OBJ
+

Type (channel/reservoir) of the water body to which the gate is +attached. Gates are always connected from a water body to a node. This +column is a picklist that is also connected to the Name/no. column.

+
FROM_IDENTIFIER
+

Identifier (channel number or reservoir name) of the water body to which +the gate is attached.

+
TO_NODE
+

Node to which gate is attached.

+

Table Info

+

Identifier:

+
NAME
+

Parent Table:

+
GATE
+

Include Block:

+
GRID
+
+

GATE_WEIR_DEVICE

+

This table lists hydraulic structures that exist at the gate site to +control flow that resemble weirs or rectangular conduits. In this table, +the user specifies physical properties of the device as well as default +operations. Both employ the following formulas depending on whether the +water surface is higher on the water body or node side of the gate:

+

Q = nCop_toCtoA(zwb, p) sqrt[ +2g(zwb - znode) ] ... zwb > +znode

+

Q = nCop_fromCfromA(znode, p) sqrt[ +2g(znode - zwb) ] ... zwb \< +znode

+

Where:

+
    +
  • n is the number of duplicate devices
  • +
  • Cop_to and Cop_from are operating coefficients + representing controls such as flap gates
  • +
  • Cto and Cfrom are coefficients representing + the hydraulic efficiency of the gate
  • +
  • A is the area of flow depending on higher water surface and position + p
  • +
  • g is gravity and
  • +
  • zwb and znode are the water surface elevations + at the water body and node (node surface is assessed by means of a + reference channel that has no gates attached to it).
  • +
+

Please see usage notes below

+

Field Descriptions

+
GATE_NAME
+

Name of the gate in which the device is located.

+
DEVICE
+

Name of the device.

+
NDUPLICATE
+

Number of exact duplicates, such as a number of similar pipes in +parallel. Parameters such as width apply to a single one of the +duplicates.

+
WIDTH
+

Maximum width of the device (radius of a pipe, width of a weir).

+
ELEV
+

Invert elevation or weir crest.

+
HEIGHT
+

Height of the device from the invert elevation. This can be used to +represent the height of rectangular flashboards or of a radial gate. If +the surface goes above this height, flow will be submerged. Use NA for +an open top. If you click in an NA column, you will see that it is +encoded using a large number, but you should only use 'NA' or a real +height.

+
CF_FROM_NODE
+

Flow coefficient of the gate (0 \< Cto \<= 1.0) describing the +efficiency of the gate from node to water body. This parameter is the +physical coefficient of flow. It should never be zero and should not be +used to describe a control structure or operation such as flap gates or +gate openings.

+
CF_TO_NODE
+

Same as CF_FROM_NODE, but for the direction from water body to node.

+
DEFAULT_OP
+

Default operation mode. The gate operation is a "magic" parameter +between 0.0 and 1.0 that modulates gate flow. Operating coefficients can +be used to represent flap gates, fractions of duplicates operating or +other physical controls. The default ops are simple on this table are +like initial conditions -- if you want more sophisticated control you +will need to use a Gate Time Series or Operating Rule. Nevertheless, the +defaults are enough to represent structures that are fully open or +closed or operated unidirectionally. Here is how the default operation +mode will affect the operating coefficient:

+
gate_open
+

Cop_to=1.0; Cop_from=1.0;

+
gate_close
+

Cop_to=0.0; Cop_from=0.0;

+
unidir_to
+

Cop_to=1.0; Cop_from=0.0;

+
unidir_from
+

Cop_to=0.0; Cop_from=1.0;

+

Table Info

+
Identifier:
+

GATE_NAME, DEVICE

+
Parent Table:
+

GATE

+
Parent Identifier:
+

GATE_NAME

+
Include Block:
+

GRID

+

GATE_PIPE_DEVICE

+

This table lists pipes at the gate site. In this table, the user +specifies physical properties of the device as well as default +operations.

+

Field Descriptions

+
GATE_NAME
+

Name of the gate in which the device is located.

+
DEVICE
+

Name of the device.

+
NDUPLICATE
+

Number of exact duplicates, such as a number of similar pipes in +parallel. Parameters such as width apply to a single one of the +duplicates.

+
RADIUS
+

Radius of the pipe.

+
ELEV
+

Invert elevation or weir crest.

+
CF_FROM_NODE
+

Flow coefficient of the gate (0 \< Cto \<= 1.0) describing the +efficiency of the gate from node to water body. This parameter is the +physical coefficient of flow. It should never be zero and should not be +used to describe a control structure or operation such as flap gates or +gate openings.

+
CF_TO_NODE
+

Same as CF_FROM_NODE, but for the direction from water body to node.

+
DEFAULT_OP
+

Default operation mode. The gate operation is a "magic" parameter +between 0.0 and 1.0 that modulates gate flow. Operating coefficients can +be used to represent flap gates, fractions of duplicates operating or +other physical controls. The default ops are simple on this table are +like initial conditions -- if you want more sophisticated control you +will need to use a Gate Time Series or Operating Rule. Nevertheless, the +defaults are enough to represent structures that are fully open or +closed or operated unidirectionally. Here is how the default operation +mode will affect the operating coefficient:

+
gate_open
+

Cop_to=1.0; Cop_from=1.0;

+
gate_close
+

Cop_to=0.0; Cop_from=0.0;

+
unidir_to
+

Cop_to=1.0; Cop_from=0.0;

+
unidir_from
+

Cop_to=0.0; Cop_from=1.0;

+

Table Info

+
Identifier:
+

GATE_NAME, DEVICE

+
Parent Table:
+

GATE

+
Parent Identifier:
+

GATE_NAME

+
Include Block:
+

GRID

+
+

Several types of time series and operational controls can be placed on gates

+
    +
  • +

    At least one channel at every node must be ungated.

    +
  • +
  • +

    Gates can be removed using an operation rule that sets the + gate's install variable to zero. Gates that are uninstalled behave + like normal nodes with equal water surface constraints between them. + Operations and time series that manipulate the device operating + coefficients and positions will be applied, but the devices will be + totally ignored in computations to determine flow.

    +
  • +
  • +

    Gates can be controlled by a number of variables that are + time-varying and controlled by time series or operating rules:

    +

    install

    +
      +
    • Install applies to the whole gate, not individual devices. When + the gate is uninstalled (install=0) the gate ceases to exist, + none of its devices are applied (although they continue to exist + in the background). The gate is totally replaced by an + equal-stage compatibility condition.
    • +
    +
  • +
  • +

    op_to_node

    +
      +
    • Operating coefficient in the direction from water body to node.
    • +
    +
  • +
  • +

    op_from_node

    +
      +
    • Operating coefficient in the direction from node to water body.
    • +
    +
  • +
  • +

    op

    +
      +
    • Operating coefficient in both directions. This is just a + convenience combo of the individual to/from node versions. It is + write-only in operating rules, because it combines two variables + and there is no single value that can be read.
    • +
    +
  • +
  • +

    position

    +
      +
    • Physical operating position whose interpretation depends on the + Position Control setting of the gate device. This is now + deprecated in favor of more direct manipulation of things like + gate elevation.
    • +
    +
  • +
  • +

    elev

    +
      +
    • Weir crest or pipe invert elevation. This can represent + evolution over time or a bottom-operating structure.
    • +
    +
  • +
  • +

    width

    +
      +
    • Weir width or pipe radius. This usually represents evolution + over time.
    • +
    +
  • +
  • +

    height

    +
      +
    • Weir gate height, width of a flashboard. This can represent + evolution over time or a top-operating structure like a radial + gate.
    • +
    +
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Groups/index.html b/manual/reference/Groups/index.html new file mode 100644 index 00000000..cc74b8d2 --- /dev/null +++ b/manual/reference/Groups/index.html @@ -0,0 +1,718 @@ + + + + + + + + + + + + + + + + + + Groups - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Groups

+

Overview

+

GROUPS are user-defined groups of model objects, for instance groups +of water bodies or groups of boundary inputs. Groups are used in a number +of places in DSM2, including: tracking of constituents originated from +grouped sources, tracking of particles as they reside or move between +groups of water bodies and/or boundaries, and assignment of rate +coefficients in QUAL to groups of water bodies. In each context, the +types of model objects that are allowed in the groups may be slightly +different. That validation takes place elsewhere in the object using the +group.

+

Tables

+ +

GROUP

+

The GROUP table defines the name of a group. It has one column!!! The +reason we do this is to provide a top level table for overriding and +redefining groups in the layering system.

+

Field Descriptions

+
NAME
+

Name of the group. This is the identifier for the group used in +references elsewhere in the input system.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

GROUPS

+
+

GROUP_MEMBER

+

The Group Members Table lists members of the parent group. The group +members are identified using patterns written using regular expressions +and a special syntax for ranges of numbers. If this sounds like nonsense +-- don't worry. The examples should cover most of the important ways you +would want to define group members. Note also that you can use multiple +rows to define the group -- the result will be the union of the members +from the individual rows.

+

Field Descriptions

+
MEMBER_TYPE
+

The type (channel, etc) of model object.

+
Identifier/Pattern
+

A pattern that will be matched against the identifier of the object +(channel number, input name, etc). The pattern can be a regular +expression or use the special range notation.

+

Here are some examples:

+
range:132-176
+
+

Matches any number in this range,inclusive

+
dicu_drn_.*
+
+

Dot-star is a wildcard (matches any name that starts with dicu_drn)

+
mtz
+
+

Exact name

+
(183|184|185)
+
+

A choice of number identifiers

+
(mtz|sjr)
+
+

A choice of names.

+
14[2-7]
+
+

The regular expression way of doing ranges, which works for a single +digit

+

Table Info

+
Identifier:
+

GROUP_NAME,MEMBER_TYPE,PATTERN

+
Parent Table:
+

GROUP

+
Parent Identifier:
+

GROUP_NAME

+
Include Block:
+

GROUPS

+
+
    +
  • A regular expressions description can be found + in wikipedia and a tutorial and guide can be found + at http://www.regular-expressions.info/. You can + probably do most of the group matching you want by modifying the + above sample patterns, but the possibilities are endless.
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/IO_Files/index.html b/manual/reference/IO_Files/index.html new file mode 100644 index 00000000..db22df30 --- /dev/null +++ b/manual/reference/IO_Files/index.html @@ -0,0 +1,690 @@ + + + + + + + + + + + + + + + + + + IO Files - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

IO Files

+

Overview:

+

The IO_FILES table is where you declare most of the non-dss output +from a simulation, including echoed text output files, restart files and +output tidefiles from HYDRO (input tidefiles are specified for QUAL and +PTM in the TIDEFILE section). IO_FILES can only be specified in +the main text input file (hydro.inp, qual.inp, ptm.inp).

+

Tables:

+

IO_FILE

+

Keyword Descriptions

+
MODEL
+

Model generating the file. For a restart file this should be the model +(hydro|qual) that is being restarted. For echoed output use the keyword +"output".

+
TYPE
+

Type of file: hdf5, restart,  or "none" for echoed output.

+
IO
+

Direction of the file: "in", "out", or "none" for echoed output.

+
INTERVAL
+

Interval, for hdf5 tidefile output.

+
FILENAME
+

Name of file. Should have a suitable extension: *.hrf for hydro restart +file, *.qrf for qual restart file, *.h5 for hdf5 tidefile or *.out for +echoed output.

+

Table Info

+
Identifier:
+

none: no layering

+
Include Block:
+

none: launch file only

+

Examples:

+

HYDRO example:

+

This example includes standard hydro runtime output file, a restart +output file that is regenerated every model day (overwriting the +previous day's file), an hdf5 tidefile for passing information to QUAL +and an echo file (replicate of input). All of the file names use text +substitution -- the value would come from an environmental variable, +ENVVARS section in the input file or ENVVARS section of a config file.

+
IO_FILES      
+MODEL  TYPE     IO    INTERVAL FILENAME  
+output none     none  none     ${HYDROOUTFILE}  
+hydro  restart  out   1day     ${QUALRSTFILE}  
+hydro  hdf5     out   1hour    ${HYDROHDF5FILE}  
+hydro  echo     out   none     ${DSM2MODIFIER}_hydro_echo.inp  
+END
+
+

QUAL example:

+

This example includes a general qual runtime output file, a restart +output file that is regenerated every model day (overwriting the +previous day's file), a restart file that will be used to generate the +initial condition for the run, and an hdf5 tidefile for passing +information to QUAL and an echo file (exact replicate of input).

+
IO_FILES      
+MODEL  TYPE    IO   INTERVAL FILENAME  
+output none    none none     ${QUALOUTFILE}  
+qual   restart out  1day     ${QUALRESTART}  
+qual   restart in   none     qualinit_30SEP1999.qrf  
+qual   hdf5    out  1hour    ${QUALHDF5FILE} 
+qual   echo    out  none     ${DSM2MODIFIER}_qual_echo.inp  
+END
+
+

PTM example:

+

This example includes a PTM trace file (which is required to produce +flux DSS output) and an animation file (which is required for animated +output).

+
IO_FILES      
+MODEL TYPE  IO  INTERVAL FILENAME  
+ptm   trace out none     ${DSM2OUTPUTDIR}/trace.out  
+ptm   anim  out 15min    ${DSM2OUTPUTDIR}/anim.out
+ptm   echo  out none     ${DSM2MODIFIER}_ptm_echo.inp  
+END
+
+

The runtime output file from HYDRO is used in the preparation of PTM +visualization tools.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Input_Climate/index.html b/manual/reference/Input_Climate/index.html new file mode 100644 index 00000000..ab49f125 --- /dev/null +++ b/manual/reference/Input_Climate/index.html @@ -0,0 +1,609 @@ + + + + + + + + + + + + + + + + + + Input Climate - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Input Climate

+

Overview:

+

Climate inputs are time series assignments to climate variables used in +non-conservative constituent runs.

+

Tables:

+
    +
  • +
      +
    • INPUT_CLIMATE
    • +
    +
  • +
+

INPUT_CLIMATE

+

Climate input assigns time-varying values to climate variables. The table assigns a time series data source.

+

Field Descriptions

+
NAME
+

Name of the input, used for layering.

+
VARIABLE
+

The variable that is set by this assignment.

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model. See fillin types

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if you +would like to assign a constant value to the input (the value will be +entered in the next column).

+

PATH — The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +(e.g. 4.22) here.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

QUAL_TIME_SERIES

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Input_Gate/index.html b/manual/reference/Input_Gate/index.html new file mode 100644 index 00000000..d39b00d5 --- /dev/null +++ b/manual/reference/Input_Gate/index.html @@ -0,0 +1,632 @@ + + + + + + + + + + + + + + + + + + Input Gate - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Input Gate

+

Overview:

+

Gate inputs are time series assignments to gate structure physical and +operational parameters.

+

Tables:

+ +

INPUT_GATE

+

A gate input assigns time-varying properties to gate parameters. The +table assigns a time series data source.

+

Gate paths in DSS should be of data type INST-VAL as opposed to PER-AVER +(which provides for better viewing in HECDSSVue); otherwise it is possible that +the gate does not operate as expected.

+

Field Descriptions

+
GATE_NAME
+

This must be the same as the name of the gate.

+
DEVICE
+

This must be the same as the name of the gate device. Generally all the +variables except "install" are device specific. If the variable is +"install" set the device to "none".

+
VARIABLE
+

The variable that is set by this assignment.

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model. See fillin types

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if you +would like to assign a constant value to the input (the value will be +entered in the next column).

+
PATH
+

The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +(e.g. 4.22) here.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

HYDRO_TIME_SERIES

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Input_Transfer_Flow/index.html b/manual/reference/Input_Transfer_Flow/index.html new file mode 100644 index 00000000..f6d6656c --- /dev/null +++ b/manual/reference/Input_Transfer_Flow/index.html @@ -0,0 +1,604 @@ + + + + + + + + + + + + + + + + + + Input Transfer Flow - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Input Transfer Flow

+

Overview:

+

Transfer Flows are flow time series assignments to pre-defined mass +transfers.

+

Tables:

+

INPUT_TRANSFER_FLOW

+

The transfer flow table assigns time series flows to transfers. The +table assigns a time series data source to the boundary condition.

+

Field Descriptions

+
TRANSFER_NAME
+

This must be the same as the name of the transfer.

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model. See fillin types

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if you +would like to assign a constant value to the input (the value will be +entered in the next column).

+
PATH
+

The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +(e.g. 4.22) here.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

HYDRO_TIME_SERIES

+
+

Only one flow (and no concentration) can be assigned to a transfer.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Layers/index.html b/manual/reference/Layers/index.html new file mode 100644 index 00000000..0f511018 --- /dev/null +++ b/manual/reference/Layers/index.html @@ -0,0 +1,679 @@ + + + + + + + + + + + + + + + + + + Layers - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Layers

+

Overview:

+

DSM2 batches input data into files or "layers" in order to achieve the +following goals:

+
    +
  • To group input into cohesive packages with similar content (examples: + the standard grid, sdip operating rules).
  • +
  • To identify which items are changed when a new group of inputs is + added to an existing simulation.
  • +
+

For example, consider the two layers of channels in the figure below. +The first layer defines seven channels and would have seven entries in +the CHANNEL table. This might represent a "base" grid. The second layer +changes the properties of Channel 2, adds a Channel 8 and removes +Channel 7. The second layer will have only three entries, shown in red. +These entries represent the changes relative to Layer 1, and presumably +are thematically related.

+

+
+

Example: Channel

+

Consider the above example using text input. We are going to +create CHANNEL tables representing the channel connectivity, and assume +the geometry is provided with CSDP style cross-sections listed in +an XSECT table (child items are always associated with parent items in +the same file).

+

The base data will be in a file channel_base.inp:

+

channel_base.inp

+
CHANNEL
+CHAN_NO LENGTH MANNING DISPERSION UP_NODE DOWN_NODE
+1        18000   0.030       0.80       1         2
+2         8000   0.040       0.80       2         3
+3        18000   0.040       0.80       3         4
+4        18000   0.040       0.80       4         5
+5        18000   0.040       0.80       3         5
+6        22000   0.040       0.80       5         6
+7        14000   0.040       0.80       6         7
+END
+
+XSECT
+CHAN_NO   DISTANCE    FILE
+1         0.200       1_0_200.txt
+1         0.800       1_0_800.txt
+2         0.500       2_0_500.txt
+...
+7         0.900       7_0_900.txt
+END
+
+

The revisions are in channel_revise.inp:

+

channel_revise.inp

+
CHANNEL
+CHAN_NO LENGTH MANNING DISPERSION UP_NODE DOWN_NODE
+2         8000   0.030       0.80       2         3 # Masks + Alters
+#3        9000   0.000       0.00      19        20 # Has no effect
+^7       14000   0.040       0.80       6         7 # Masks + Deletes
+...
+8        16000   0.040       0.80       8         3 # Adds
+END
+
+XSECT
+CHAN_NO DISTANCE  FILE
+2          0.100  2_0_500.txt  # Masks lower level x-sects 
+2          0.700  2_0_500.txt  #
+7          0.900  7_0_900.txt  # Will be ignored
+8          0.500  8_0_500.txt  # 
+END
+
+

The two layers are managed by the model input file that is given +directly to the model, in this case hydro_layering.inp. The two +channel files are listed in a GRID include block that lists the layers +in increasing priority.

+

hydro.inp

+
GRID
+channel_base.inp
+channel_revise.inp
+END
+
+

Now let's consider the details...

+

Include Blocks

+

Include blocks are input blocks in the master file that list other +files. The data from these other files is "included" in the order +listed. Priority is given to files read later, and these are assigned a +higher "layer number"

+

Include blocks can only contain specific types of input data. For +instance, a GRID input block only contains channel, gate, reservoir and +transfer physical specifications (not boundary conditions attached to +them). So the trick to using include blocks is knowing, say, that a +CHANNEL table belongs in a file in a GRID include block and +BOUNDARY_FLOW table belongs in a file in a HYDRO_TIME_SERIES block. In +the reference documentation, the include blocks should be listed for +each table in the Table Information section.

+

The only exception is the master file that is the one sent to the model +on the command line (often named something like hydro.inp, qual.inp, +ptm.inp). Data in this file always take precedence over other input.

+

Layer Overriding

+

Layer overriding occurs when the same data item is defined in multiple +layers (files) in the same model. Files that are read later are given a +higher "layer number" and take precedence over files read earlier. +Within the same file it is an error to redefine an entry.

+

Identifiers

+

To use layering, you have to know what constitutes redefining an entry. +Whether two items represent the same data item depends on the identifier +for the table, which is some combination of columns that uniquely +identify the item using a name or number. Identifiers for each table are +listed in the reference documents. In the above example it is channel +number CHAN_NO. The trickiest identifiers are in the output, because +they involve two (NAME, VARIABLE) or three (NAME, VARIABLE,SOURCE_NAME) +columns. In the reference documentation, the identifier is listed for +each table in the Table Information section.

+

Parent-child Tables

+

When parent-child tables are present in a file (e.g., Channels, Cross +Section, Cross Section Layer), overriding is assessed at the level of +the parent or top-level table. When you override on a top-level table, +its child table information is completely replaced as well. So, for +instance, the cross-section at Channel 2 Distance 0.500 in +channel_base.inp in the example is completely ignored. The model makes +no attempt to "mix it in" with the replacement version of Channel 2.

+

Child tables must be in the same file as their parent tables. This is a +departure from earlier versions of DSM2, but is necessary to make +layering well-defined.

+

Deleting lower level data

+

Occasionally, the motivation for overriding an item is to eliminate it. +You can do this on any top-level table by prepending a carat ^ at the +beginning of the line. This will remove items on lower levels with the +same identifier. Note that it doesn't matter what other data you put in +the parent fields (you do need placeholders). Also you needn't add child +information if the only reason for the parent entry is to delete it -- +but sometimes it is nice to have the child data there if you are +toggling back and forth.

+

Deleting data is quite different from commenting it out (using a # +sign). Commenting data out on a high level would merely mean that the +input reader would skip over the line. It would not affect any data with +the same identifier on a lower level.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Manual/index.html b/manual/reference/Manual/index.html new file mode 100644 index 00000000..2e32195d --- /dev/null +++ b/manual/reference/Manual/index.html @@ -0,0 +1,481 @@ + + + + + + + + + + + + + + + + + + Contents - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + + + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Node_Concentration/index.html b/manual/reference/Node_Concentration/index.html new file mode 100644 index 00000000..d4ea5406 --- /dev/null +++ b/manual/reference/Node_Concentration/index.html @@ -0,0 +1,629 @@ + + + + + + + + + + + + + + + + + + Node Concentration - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Node Concentration

+

Overview:

+

Node concentration represents the concentration of constituents attached +to boundary inflows and sources.

+

Tables:

+

NODE_CONCENTRATION

+

The NODE_CONCENTRATION table attaches concentrations to boundary and +source flows defined in HYDRO. The table also assigns a time series to +the source.

+

Field Descriptions

+
NAME
+

Name assigned to the source. An entry here must have the same name as an +entry in the BOUNDARY_STAGE, BOUNDARY_FLOW or SOURCE_FLOW tables -- by +matching names you will attach concentrations to the flow.

+
NODE_NO
+

Node number where the flow is applied. This must match the node number +given in the original flow table (it is a bit redundant, but easier to +look things up).

+
VARIABLE
+

Constituent name. If no output is requested for the constituent +currently it will be ignored.

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model.

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if you +would like to assign a constant value to the input (the value will be +entered in the next column).

+
PATH
+

The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +(e.g. 4.22) here.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

QUAL_TIME_SERIES

+
+

Multiple sources and sinks can be assigned to a node. They are often +kept separate in order to assign different concentrations to them

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Operating_Rule/index.html b/manual/reference/Operating_Rule/index.html new file mode 100644 index 00000000..64175189 --- /dev/null +++ b/manual/reference/Operating_Rule/index.html @@ -0,0 +1,827 @@ + + + + + + + + + + + + + + + + + + Operating Rule - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Operating Rule

+

Overview:

+

Operating rules are user-written rules that manipulate model inputs such +as gate operations, boundary flows based on observations of the current +state of the running model. Operating rules are documented in detail in +the DSM2 Op Rule Guide. The Operating Rules table lists the time series, +expressions and rule definitions.

+

Tables:

+ +

OPERATING_RULE

+

Defines the name, action and trigger of the operating rule.

+

Field Descriptions

+
NAME
+

Name of the operating rule. This is the identifier of the rule.

+
ACTION
+

Definition of the action to be taken when the trigger transitions from +FALSE to TRUE.

+
TRIGGER
+

Trigger that activates the rule when it transitions from FALSE to TRUE. +If the trigger is NULL it will become the trivial TRUE trigger, which is +assumed to make a transition from FALSE to TRUE at startup (it is not +"always" active).

+

Table Info

+
Identifier:
+

NAME

+
Parent Table:
+

Table is parent

+
Include Block:
+

OPERATIONS

+
+

OPRULE_TIME_SERIES

+

This table lists time series that are used in forming action and trigger +definitions. The table is not a child table -- it is a top-level layered +table.

+

Field Descriptions

+
NAME
+

Name assigned to the time series. This is the identifier of the series. +It is also the name used to refer to the series in expressions.

+
FILLIN
+

Method used to interpolate when the model time step is finer than the +time series time step. Use "last" to use the last time stamp in the +period (a HEC-DSS convention) and "linear" to interpolate linearly

+
FILE
+

Input  file (HEC-DSS or text file in HEC-DSS format) storing the time +series or the word constant if the series is assigned a fixed value.

+
PATH
+

HEC-DSS path of the data within the Input File or the value (e.g. 2.0) +if the series is assigned a fixed value.

+

Table Info

+
Identifier:
+

NAME

+
Parent Table:
+

Table is parent

+
Include Block:
+

OPERATIONS

+
+

OPRULE_EXPRESSION

+

This table allows the user to list expressions that can be reused later +in operating rule actions and triggers. Expressions cannot depend on +other expressions. Expressions are not a child table -- the table is a +top-level layered table.

+

Field Descriptions

+
NAME
+

Name of the expression. This is the identifier of the expression. It is +also the name used to refer to the expression in expressions. 

+
DEFINITION
+

Definition of the expression -- this will be a formula involving model +variables, seasons and time series. The time series can be from the +above time series table or elsewhere in the Input Time Series section. +Please see the Operating Rules Guide for more details.

+

Table Info

+
Identifier:
+

NAME

+
Parent Table:
+

Table is parent

+
Include Block:
+

OPERATIONS

+
+
    +
  • Numerous usage comments in the Operating Rules Guide.
  • +
  • Time series referenced in the operating rules may be defined in an + OPRULE_TIME_SERIES table or they may be time series defined + elsewhere, such as the name of a boundary flow.
  • +
  • Neither the OPRULE_EXPRESSION or OPRULE_TIME_SERIES table is a child + table of OPERATING_RULE. However, it is common to put related items + in the same file.
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Operating_Rule_Guide/index.html b/manual/reference/Operating_Rule_Guide/index.html new file mode 100644 index 00000000..5c52889e --- /dev/null +++ b/manual/reference/Operating_Rule_Guide/index.html @@ -0,0 +1,967 @@ + + + + + + + + + + + + + + + + + + Operating Rule Guide - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Operating Rule Guide

+

Operating Rule Guide

+

Introduction

+

DSM2 uses a text language for operating rules, and the rules are stored +in the database. Operating rules combine trigger and action directives, +each of which is an expression based on observed model states, seasonal +information and exogenous time series input as well as other +expressions.

+

Actions are things the operating rule does. In DSM2-DB, the actions +affect either gate devices or source/sink flow boundaries. For gate +devices the operating flow coefficient can be changed. For sources and +sinks, flow may be set to a new constant value or a new time series. +Expressions for actions tend to be of the form:

+
SET model_object TO numerical_expression
+
+

The action becomes applicable when a corresponding trigger goes from +false to true. Triggers are written with expressions that evaluate true +or false:

+
chan_stage(channel=132, dist=1000) < -0.1
+
+

Some rules are immediate responses to model conditions (close the gate +when stage dips below 0.5). Other rules use triggers to describe seasons +or situations where the action is applicable (reduce a boundary flow +when the month is between May and September). Still other rules apply +from the beginning of the run and the trigger column is just a nuisance +–.

+

Expressions

+

An expression is just a named quantity that is derived from model data, +outside time series data, math and time functions. An example of a +simple numerical expression based on current DSM2-DB flow looks like +this:

+
ebb := chan_flow(channel=132, dist=1000) > 0.01
+
+

This example samples the current time step model flow 1,000 ft +downstream of the upstream node in channel 132 and checks whether it is +greater than 0.01 cfs. The expression assigns the answer the name ebb, +so it can be reused in later expressions. Note that ebb is a logical +expression which evaluates to true or false depending on the model time +step. Numerical expressions will be introduced shortly.

+

Assignments of named expressions always start with a name and the assignment +operator “:=”. Spaces around the assignment and greater-than operators +are optional. The assignment operator isn’t actually used in the GUI, +because there is a separate column for the name and definition.

+

The chan_flow part of the expression represents the value of a model +variable. Model variables typically require identifiers, which are +included in parenthesis and are a comma-separated list with elements +that depend on the context (see the section below on DSM2 model variable +identifiers). These identifiers can be numerical or text strings:

+
chan_flow(channel=132, dist=1000) ...numerical
+
+gate_op(gate=middle_river_barrier, device=weir) ...strings
+
+

The examples thus far have been logical expressions. Logical expressions +usually appear in triggers rather than actions. Besides logical +expressions, expressions that evaluate to numerical values can be +defined:

+
ebbmagnitude := log(chan_flow(channel=132, dist=1000))
+
+

and expressions can also involve simple math operators. For instance:

+
ebbmagnitude := log(chan_flow(channel=132, dist=1000))
+
+

is an expression that evaluates flow, applies the log function to it and +then assigns it to the variable name ebbmagnitude. For details, see the +section below on Math Operators)

+

Model time can also be used in expressions. The following expression +describes the VAMP season for San Joaquin river management:

+
vamp := (MONTH == APR) or (MONTH == MAY)
+
+

The definition could also include the date, day of the month, or time of +day.

+

month, or time of day.
+Finally, the following example combines a model state (stage/water +surface) observation, an external time series (called tide_level) and +some simple arithmetic. The expression might be used with a slowly +fluctuating tide or sea level datum to provide an idea of critical stage +in the South Delta compared to ambient tide conditions.

+
critical_stage := chan_stage(channel=132,dist=1000)<(tide_level-1.0)
+
+

Operating Rules

+

It is now straightforward to use expressions in operating rules. The +following example is based on expressions that were developed above. +Bold face words correspond to tables or columns of the GUI.

+

Name

+
middle_vamp_ebb
+
+

Expressions

+
ebb := chan_flow(channel=132, dist=1000) > 0.01
+vamp := (month == Apr) OR (month == May)
+
+

Trigger

+
vamp AND ebb
+
+

Action

+
SET gate_op(gate=middle_river_barrier, device=weir) TO ts(new_time_series)
+
+

The middle_vamp_ebb operating rule lies dormant until the first time +step when vamp and ebb (a compound expression based on the expressions +vamp and ebb) becomes true. At that point the action will be +taken and the weir operating coefficient will start to operate according +to the values in the DSS time series new_time_series. Note that except +for the expression definitions, the parts of this operating rule can be +united using the name assignment (:=) and WHERE directives:

+
middle_vamp_ebb := SET gate_op(gate= middle_river_barrier,device = weir) TO ts(new_time_series) WHERE (vamp AND ebb)
+
+

This is the form of the operating rule that would be used, say, when +parsing a text file rather than using the GUI.

+

Prediction

+

Anticipation using linear or quadratic extrapolation can be added to +numerical expressions in expressions using the PREDICT function. What is +nice about PREDICT is that it allows trigger expressions to more +accurately express the intent of a rule, because you don't need +"buffers" which are confusing and inaccurate.

+

For instance lets say you want to take some action like close a gate to +protect stage in channel 206 in the South Delta from going below zero. +If you use a buffer, you write the following:

+
SET [some action] WHEN chan_stage(chan=206, dist=0) < 1;
+
+

This is confusing because the value "1" is used as the trigger criterion +when the intent has to do with stage of 0 and not 1. It is inaccurate +because it will go off no matter what the trend is. With anticipation, +the same rule would look like this:

+
SET [some action] WHEN PREDICT(chan_stage(chan=206, dist=0),LINEAR, 30MIN) < 0;
+
+

This states the trigger clearly in terms of the value 0. It is also much +less likely to go off by accident, because the time trend is used (stage +going below 1 is not significant if it is dropping very slowly and not +likely to make it to 0). In addition to LINEAR extrapolation quadratic +predictions are available using QUAD as the second argument to PREDICT. +Over time periods of less than an hour (and not right next to a gate or +reservoir), quadratic interpolation is markedly more accurate than +linear.

+

RAMP (transition)

+

For actions, there is also a way to smooth time. The keyword RAMP after +an action (together with a number of minutes) will transition in the +action gradually, if such a transition makes physical sense.

+

For instance, a ramping version of middle_vamp_ebb might use the +definition for ebb:

+
SET gate_op( gate=middle_r_barrier, device=radial) TO ts(new_time_series) RAMP 60min
+
+

Complementary Triggers and IFELSE

+

Often, an operating rule is paired with a complementary rule that will +reverse its action. For instance, to complement the above rule for ebb +flow the following operating rule for flood flow might be added:

+

Name

+

middle_vamp_flood

+

Expressions

+
flood := chan_flow(channel=132, dist=1000) < -0.01
+vamp := (month == Apr) or (month == May)
+
+

Trigger

+
vamp and flood
+
+

Action

+
SET gate_op( gate=middle_r_barrier, device=barrier,direction=to_node) TO old_time_series
+
+

This rule effectively undoes the ebb action. The example underscores a +necessary but somewhat unintuitive point about triggers: they are +one-time and unidirectional. A rule whose trigger is vamp and ebb will +activate when this expression changes from false to true but will not do +anything or even notice if vamp and ebb subsequently becomes false +again. If the complementary behavior is desired, this intent must be +specified in a second rule. Often the complementary rule is subtly +different from the exact negation of the original; for instance, the +trigger vamp and flood is not the same as not(vamp and ebb). In the case +of the Montezuma Salinity Control Structure, the flood and ebb triggers +are not even based on the same variable (the gate is opened based on a +head difference, closed based on velocity).

+

The middle_vamp_ebb example combines vamp, which is the seasonal +applicability of the rule with ebb, which is a tidal phenomenon. There +are also meaningful operating rules that do not need a trigger at all. +For instance, the user might want to operate SWP and CVP pumping based +on a time series but bound it by some fraction of Sacramento inflow. The +trigger in this case is “TRUE” and it will go off once at startup. This +is the default in the GUI if you leave the trigger blank.

+

If what you really want is a trigger that continuously monitors a +true-false condition and applies a value accordingly, you may want to +consider using the IFELSE function and no trigger. For instance:

+
SET ext_flow(node=17) TO IFELSE( vamp, ts1, ts2)
+
+

will set the boundary flow at node 17 (San Joaquin River) to time series +ts1 whenever vamp is true and to ts2 when vamp is not true.

+

Misfires and Redundant Triggering

+

Extra triggering and rule activation may seem harmless when you consider +one rule in isolation. Rerunning an action hurts performance, but the +action is redundant rather than harmful. The real problem with rules +that misfire is that they are active too often and tend to interfere +with (“lock out” or “bump”) other rules that are trying to manipulate +the same model variable.

+

Here is an example of misfiring trigger based on an expression using +date terms:

+
(YEAR >= 1990 AND MONTH>=APR AND DAY>=14)
+
+

(note: a much better way to write this expression using the DATE keyword +is given in the reference section)

+

Because of the ANDs, this expression requires three conditions to be +true at once in order to evaluate to TRUE. It goes off as intended on or +about 14APR1990. But what happens on 01MAY1990? On 14MAY1990? This +trigger is going to evaluate to FALSE and then back to TRUE. When it +makes the FALSE-TRUE transition it will cause the trigger to go off, +which is probably not what was intended.

+

There is a fix for the above expression (not the recommended one) that +illustrates that the only things that matter are FALSE-TO-TRUE +transitions. One more curious point about this example is that +the correct behavior is obtained using:

+
(YEAR == 1990 AND MONTH == APR AND DAY >= 14)
+
+

Why? The rule will evaluate FALSE on or about 01MAY1990, but it will +stay false!

+

These date examples are so common that there is a special way of dealing +with them. See the function reference for DATE and SEASON.

+

Default (TRUE) Trigger

+

If you leave the trigger definition blank in the GUI the trigger +expression will be set to WHEN TRUE.

+

The TRUE trigger is roughly equivalent to "at startup" and you should be +sure not to confuse it with "always true". Recall it is transitions that +are important, and this trigger makes its only nominal FALSE-TO-TRUE +transition once at the very beginning of the run. Once displaced by an +overlapping action, the rule will never activate again.

+

A rule that evaluates to a trivial FALSE will never do anything.

+

As an example of a situation where these concepts matter, consider a +rule that toggles use of a gate for the entire simulation. By default, a +gate in the model is installed. Assume we have set up an expression +named use_barriers or remove_barriers indicating whether we want to use +gates. Three possibilities for writing the rule are:

+
   TRIGGER                ACTION 
+1. TRUE                SET gate_install(gate=...) TO use_gate
+2. use_gate            SET gate_install(gate...) TO INSTALL
+3. remove_gate         SET gate_install(gate=...) TO REMOVE
+
+

Option 1 uses the default trigger. It will be activated at startup and +the gate installation will be set to the expression variable use_gate. +Option 2 is interesting because it will never do anything useful. It +will be evaluated once at the start of the run, but it will never +trigger if use_gate is FALSE. It will trigger if use_gate is TRUE, but +this merely carries out the default. Option 3 remedies this by using +remove_gate -- the non-default -- as the trigger. Different users seem +to regard options (1) and (3) as more intuitive.

+

Conflicts

+

When a rule is triggered, it will be activated unless it conflicts with +another, active rule. Rules conflict when they operate on the same model +variable. For instance, two rules that act to change a weir coefficient +in the same gate/weir conflict.

+

Two specifications govern conflicts:

+

1. When a rule conflicts with an active rule it is deferred. Deferred +rules are not activated, but they are tricked into thinking they +evaluated FALSE so that they can possibly make a FALSE-TRUE transition +again the next time step.

+

2. When a rule conflicts with another potentially activating rule, the +results are “undefined”. We are unaware of any universal solution in +this situation. The best solution is to write rules that don’t do this – +we are currently working on a better warning system to detect when this +happens.

+

DSM2 Variable and Function Reference:

+

Variables

+

The variables from DSM2 that can be used in operating rules include +boundary and grid variables that can be changed and those that are +merely observable (read-only). The observable variables are divided +between variables that can be set to time series (Dynamic Variables) +that will apply ever-after and variables that can only be set to new +static values (Static Variables)

+

Dynamic Control Variables

+

These variables are dynamically controllable and can be set to a time +series. Once the new time series is set, the boundary or structure being +controlled will have no memory of its old controlling time series. Most +dynamic variables are gate and boundary data.

+
gate_op(gate=textname,device=textname, direction=[to_node|from_node|to_from_node])
+
+

Device operating coefficients (0..1) in corresponding direction. Use +keywords CLOSE (=0) and OPEN (=1) to make rules more readable. The +option "to_from_node" is write-only -- a convenience feature that writes +to two otherwise separate variables.

+
gate_position(gate=textname,device=textname)
+
+

Physical operation of control structure such as radial gate height +(physical units). The interpretation of "position" is dependent on the +"control_type" of the gate. If it is gated from the bottom, position +indicates elevation and is the same as elev. If the control type is +gated from the top, as in a radial gate, the position is the height. +This variable is deprecated now, in favor of directly using "elev" or +"height".

+
gate_height(gate=textname,device=textname)
+
+

Height of gate device.

+
gate_elev(gate=textname,device=textname)
+
+

Crest elevation or invert elevation of gate device.

+
gate_width(gate=textname,device=textname)
+
+

Width or radius of gate device.

+
ext_flow(name=textname)
+
+

External flow (boundary flows, source/sink)

+
transfer_flow(transfer=textname)
+
+

Flows in object-to-object transfers

+

Static Control Variables

+

These are variables that are normally static. You can set them to a +constant. If you set them to a time series, the model will not complain, +but the result may not be what you expect. The model variable will only +be set to the current value of the series at the time the rule was +activated. The variable won't keep changing with the time series.

+
gate_install(gate=textname)
+
+

Determines or inquires whether the given gate is installed.

+
SET gate_install(...) TO [REMOVE|FALSE]
+
+

completely removes the gate and restores an equal-stage compatibility +condition to the channel junction.

+
SET gate_install(...) TO [INSTALL|TRUE]
+
+

installs the gate.

+
gate_coef(gate=textname,device=textname,direction=[to_node|from_node])
+
+

Gate coefficient of the device in the given direction. This is a +physical quantity of the structure, representing the roughness or +efficiency of flow. It should not be used for operating controls such as +flap gates. The coefficients will change only rarely when the actual +site is altered and should never leave the range (0,1).

+
gate_nduplicate(gate=textname,device=textname)
+
+

Number of duplicate devices.

+

Observable Variables

+

These are read-only model variables that cannot be manipulated directly, +but can be observed and used in expressions for triggers and actions.

+
chan_flow(channel=number,dist=[number|length])
+
+

Flow in channel. dist=length indicates the end of the channel.

+
chan_vel(channel=number, dist=[number|length])
+
+

Velocity at given channel and distance.

+
chan_stage(channel=number,dist=[number|length])
+
+

Water Surface at given channel and distance.

+
chan_surf
+
+

Same as stage (water surface) in channel

+
res_stage(res=textname)
+
+

Water surface in reservoir

+
res_flow(res=textname, node=number)
+
+

Flow from reservoir to node

+
ts(name=textname)
+
+

Any time series named in the Operating Rule View of the GUI may be used +by referencing the name. Time series evaluate to their value at the +current time step.

+

Model Time Queries

+

The following commands retrieve model date or seasonal information:

+
YEAR, MONTH, DAY
+
+

Retrieves the year, month and day associated with the current model time +step. These are returned as numbers. When testing them, you can (for +clarity) use 3-letter abbreviations for the months. Examples:

+
YEAR >= 1991
+
+MONTH + 1 < MAY
+
+HOUR, MIN
+
+

Retrieve the (24 hour) hour and minute associated with the current model +time step.

+
DATE
+
+

Returns a time stamp corresponding to the beginning of the day on the +current model date. Example:

+
DATE >= 11OCT1992 (not time part)
+
+DT
+
+

Represents the model time step in seconds. This is often useful for use +with ACCUMULATE

+
DATETIME
+
+

Returns a time stamp corresponding to the current model date and time. +Example:

+
DATETIME > 04FEB1990 00:00 (date plus time)
+
+SEASON
+
+

Returns a time stamp relative to the beginning of the year corresponding +to the beginning of the day on the current model date and time. +Comparisons such as SEASON > 15APR AND SEASON \<01MAY avoid common +logical mistakes from building this from scratch.
+There is one other gotcha with SEASON that comes up at the end of time +periods because the timestamp is always at 00:00. Compare SEASON > +15APR AND SEASON \<01MAY with SEASON > 15APR AND SEASON ≤30APR and notice +that the latter does not include the entire day 30APR.

+

Note that SEASON and DATE/DATETIME are preferable to combined expressions built from atomic +expressions like day and month. They are clearer and avoid some curious +gotchas. For instance DATE >= 14APR1990 will evaluate true only once +per year, whereas (YEAR >= 1990 AND MONTH>=APR AND DAY>=14) will +evaluate true on Apr 14, false on May 1 and true again on May 14. You +could get the intended behavior with (YEAR == 1990 AND MONTH == APR AND +DAY>=14), which will go from false to true only once, but the fix +hardly seems worth the trouble.

+

Numerical Operations

+

The following operators and functions are available

+
+, -, *, /
+
+

Arithmetic operators with standard precedence of operations. You can use +parenthesis to change the evaluation order.

+
x^3, x^y
+
+

Power of x and x to the power of y

+
MIN2(x,y)
+
+

Minimum of two arguments.

+
MAX2(x,y)
+
+

Maximum of two arguments.

+
MIN3(x,y,z)
+
+

Minimum of three arguments.

+
MAX3(x,y,z)
+
+

Maximum of three arguments.

+
SQRT(x)
+
+

Square root of x

+
EXP(x)
+
+

Exponent function (e to the power of x)

+
LN(x)
+
+

Natural log of x

+
LOG(x)
+
+

Base 10 log of x

+

Logical Operations

+
x==y
+
+

Tests equality.

+
x<>y
+
+

Tests inequality.

+
x<y,x>y, x<=y, x>=y
+
+

Comparisons.

+
TRUE
+
+

The value TRUE

+
FALSE
+
+

The value FALSE

+
NOT expression
+
+

Negation of expression, as in NOT(x \< y)

+
expr1 AND expr2
+
+

Logical ‘and’, which evaluates to TRUE only if both the expressions it +joins are true. Expression (expr2) will not be evaluated if expr1 +evaluates to FALSE.

+
expr1 OR expr2
+
+

Logical ‘or’

+

Special Functions

+
ACCUMULATE(expression, initval [,resetcond])
+
+

Cumulative value function. Accumulates additively the value of +expression using initval (another numerical expression) as the initial +condition and resetting the total anytime the resetcond evaluates to +true. If you want to integrate you should multiply the expression by DT +or else your rule won't be robust if someone changes the time step.

+
IFELSE(boolexpr, valexp1,valexpr2)
+
+

The ternary operator. If boolexpr returns true, returns the value given +by valexpr1. If boolexpr returns false, returns the value given by +valexpr2.

+
LOOKUP(expression, lookup_array,value_array)
+
+

Lookup values from a small user supplied table. The lookup array is +provided using a bracketed, comma-separated list of values such as +[1000.,2000.,3000.]. The value_array return values are similar but +must have a length one smaller than the number of lookup values. The +array values must be hard-wired numbers at the present time -- +expressions are not allowed. The LOOKUP compares expression to elements +of lookup_array. The highest element of the lookup table is currently a +limit, not an actual lookup slot. The function returns the component of +value_array corresponding to the highest index in lookup array that is +\<= expression, e.g.:

+
LOOKUP(1000.,[1000.,2000.,3000.], [1.,2.]) returns 1.
+
+LOOKUP(2000.,[1000.,2000.,3000.], [1.,2.]) returns 2.
+
+LOOKUP(3000.,[1000.,2000.,3000.], [1.,2.]) is an error.
+
+PID(expression,target,low,high,K, Ti,Td,Tt,b)
+
+

Use PID (Proportional, Integral, Derivative) control to try to guide +expression towards target. The parameters are as follows
+low: lower bound on control representing the minimum value the control +value can take (e.g. for gate height this might be zero).

+

high: upper bound on control.

+

K: The constant representing the Proportion component of the control. +The constant multiplies (expression-target) to change a control value, +so choose a factor that is reasonable that takes the scaling of the +expression to the scaling of the control.

+

Ti: Integral time constant of control

+

Td: Derivative time constant of control.

+

Tt: Time basis of "anti-windup"

+

b: Set-point weighting (use 1.0 if you are new to PID).

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Output_Channel/index.html b/manual/reference/Output_Channel/index.html new file mode 100644 index 00000000..b7c0ddc7 --- /dev/null +++ b/manual/reference/Output_Channel/index.html @@ -0,0 +1,813 @@ + + + + + + + + + + + + + + + + + + Output Channel - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Output Channel

+

Overview:

+

The OUTPUT_CHANNEL table is used by both HYDRO and QUAL to specify +output requests inside of the channel. Output is HEC-DSS or text format. +The variables that can be requested vary by model.

+

Tables:

+ +

OUTPUT_CHANNEL

+

The table specifies the name for output request, as well as the +location, variable being output, time aggregation and destination file.

+

Field Descriptions

+
NAME
+

Name of the output request. This is part of the identifier of the table +and will be used in the B_PART of the output if it is in DSS format. +Generally, non-modelers will have an easier time understanding your +output if this is a station name that is geographically fixed (e.g. +"vernalis" or "RSAC075") than if it is a modeling construct ("ch101"). +Similarly, avoid using VARIABLE inside this name -- this +causes redundancy in the output DSS path and the layering won't work as +well.

+
CHAN_NO
+

Channel number in which output is requested.

+
DISTANCE
+

Distance along channel (from upstream node to downstream), typically in +feet. Results will be interpolated between Eulerian (HYDRO) or +Lagrangian (QUAL) computational points.

+
VARIABLE
+

Model variable to be output. In HYDRO, you can request stage,flow,vel. +In QUAL you can request stage,flow or the name of any constituent in the +model. When no output request is made for a constituent that is not +required for reaction kinetics, it is not calculated.

+
INTERVAL
+

Time Interval of the output. Can be any DSS-compliant interval with a +unit that is not calendar dependent (MIN, HOUR, DAY). This is a +departure from previous versions of DSM2, which offered monthly output.

+
PERIOD_OP
+

Period aggregation performed to convert the model time step into the +time interval of the output. May be INST or AVE, which produce +instantaneous values and period averages, respectively.

+
FILE
+

Name of the output file where the data will be stored. If the extension +.txt is given, the output is automatically in text format. If a .dss +extension is used, output is in HEC-DSS format.

+

Table Info

+
Identifier:
+

NAME, VARIABLE

+
Parent Table:
+

Table is parent

+
Include Block:
+

OUTPUT_TIME_SERIES

+

OUTPUT_CHANNEL_SOURCE_TRACK

+

This table is identical to OUTPUT_CHANNEL except it is only used in QUAL +and it contains one additional field for tracking constituent sources.

+

Field Descriptions

+
NAME
+

Name of the output request. See comments above, and note that in this +case you should also avoid using the SOURCE_NAME in the output name.

+
CHAN_NO
+

Channel number in which output is requested. DISTANCE: Distance along +channel (from upstream node to downstream), typically in feet. Results +will be interpolated between Eulerian (HYDRO) or Lagrangian (QUAL) +computational points.

+
VARIABLE
+

Model variable to be output. In HYDRO, you can request stage,flow,vel. +In QUAL you can request stage,flow or the name of any constituent in the +model. When no output request is made for a constituent that is not +required for reaction kinetics, it is not calculated.

+
SOURCE_GROUP
+

Name of the source group that is being tracked in this output request. +To learn how to define a group, see group reference. The group used must +consist entirely of boundary or source locations -- not water bodies.

+
INTERVAL
+

Time Interval of the output. Can be any DSS-compliant interval with a +unit that is not calendar dependent (MIN, HOUR, DAY). This is a +departure from previous versions of DSM2, which offered monthly output.

+
PERIOD_OP
+

Period aggregation performed to convert the model time step into the +time interval of the output. May be INST or AVE, which produce +instantaneous values and period averages, respectively.

+
FILE
+

Name of the output file where the data will be stored. If the extension +.txt is given, the output is automatically in text format. If a .dss +extension is used, output is in HEC-DSS format.

+

Table Info

+
Identifier:
+

NAME, VARIABLE, SOURCE_GROUP

+
Parent Table:
+

Table is parent

+
Include Block:
+

OUTPUT_TIME_SERIES

+
    +
  • Finer output is preferred to daily. You can easily average to daily + later outside the model using a script or time series application. + Tidal data are poorly represented by daily aggregations, and + numerous incorrect conclusions have arisen from aliasing + (fluctuations over two weeks) when a 24 hour daily averaging + operation is imposed on a naturally 25-hour phenomenon. Monthly + output is no longer allowed.
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Output_Gate/index.html b/manual/reference/Output_Gate/index.html new file mode 100644 index 00000000..1195108d --- /dev/null +++ b/manual/reference/Output_Gate/index.html @@ -0,0 +1,668 @@ + + + + + + + + + + + + + + + + + + Output Gate - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Output Gate

+

Overview:

+

The OUTPUT_GATE table is used by HYDRO to specify output requests +at a gate. Output is HEC-DSS or text format.

+

Tables:

+ +

OUTPUT_GATE

+

The table specifies the name for output request, as well as the +location, variable being output, time aggregation and destination file.

+

Field Descriptions

+
NAME
+

Name of the output request. This is the identifier of the table and will +be used in the B_PART of the output if it is in DSS format. It can be +the same as the gate name but it doesn't have to be. Avoid using +VARIABLE inside this name -- this causes redundancy in the output and +the layering won't work correctly.

+
GATE_NAME
+

Name of the gate at which output is requested.

+
DEVICE
+

Name of the gate device, if applicable. You can request operational or +physical data from a device as well as flow. You can also request some +gate output (install,flow) that is not linked to a particular device. In +this case, the field should be set to none

+
VARIABLE
+

Model variable to be output. From a device you can request some physical +data (width, height, elev), operational data +(op_to_node, op_from_node, position ) or flow oriented from water body +to node. From a gate with device=none you can request the +variables install, or total flow oriented from water body to node

+
INTERVAL
+

Time Interval of the output. Can be any DSS-compliant interval with a +unit that is not calendar dependent (MIN, HOUR, DAY). This is a +departure from previous versions of DSM2, which offered monthly output.

+
PERIOD_OP
+

Period aggregation performed to convert the model time step into the +time interval of the output. May be INST or AVE, but AVE can be +meaningless for a lot of gate variables.

+
FILE
+

Name of the output file where the data will be stored. If the extension +.txt is given, the output is automatically in text format. If a .dss +extension is used, output is in HEC-DSS format.

+

Table Info

+
Identifier:
+

NAME, VARIABLE

+
Parent Table:
+

Table is parent

+
Include Block:
+

OUTPUT_TIME_SERIES

+
+

Examples:

+

output_gate_example.inp 

+
+
    +
  • Initially, the thing that is hard to get about gate output is the + flow orientation. The output for the gate is oriented with the gate, + which may or may not be in the upstream-downstream direction
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Output_Reservoir/index.html b/manual/reference/Output_Reservoir/index.html new file mode 100644 index 00000000..6f7da000 --- /dev/null +++ b/manual/reference/Output_Reservoir/index.html @@ -0,0 +1,817 @@ + + + + + + + + + + + + + + + + + + Output Reservoir - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Output Reservoir

+

Overview:

+

The OUTPUT_RESERVOIR table is used by both HYDRO and QUAL to specify +output requests inside of a reservoir. Output is HEC-DSS or text format. +The variables that can be requested vary by model.

+

Tables:

+ +

OUTPUT_RESERVOIR

+

The table specifies the name for output request, as well as the +location, variable being output, time aggregation and destination file.

+

Field Descriptions

+
NAME
+

Name of the output request. This is part of the identifier of the table +and will be used in the B_PART of the output if it is in DSS format. +Avoid using VARIABLE inside this name -- this causes redundancy in the +output DSS path and the layering won't work as well.

+
RES_NAME
+

NAME of reservoir in which output is requested.

+
NODE
+

Node number, if the request is for a flow to a particular connected +node.

+
VARIABLE
+

Model variable to be output. In HYDRO, you can request stage,flow,vel. +In QUAL you can request stage,flow or the name of any constituent in the +model. When no output request is made for a constituent that is not +required for reaction kinetics, it is not calculated.

+
INTERVAL
+

Time Interval of the output. Can be any DSS-compliant interval with a +unit that is not calendar dependent (MIN, HOUR, DAY). This is a +departure from previous versions of DSM2, which offered monthly output.

+
PERIOD_OP
+

Period aggregation performed to convert the model time step into the +time interval of the output. May be INST or AVE, which produce +instantaneous values and period averages, respectively.

+
FILE
+

Name of the output file where the data will be stored. If the extension +.txt is given, the output is automatically in text format. If a .dss +extension is used, output is in HEC-DSS format.

+

Table Info

+
Identifier:
+

NAME, VARIABLE

+
Parent Table:
+

Table is parent

+
Include Block:
+

OUTPUT_TIME_SERIES

+

OUTPUT_RESERVOIR_SOURCE_TRACK

+

This table is identical to OUTPUT_RESERVOIR except it is only used in +QUAL and it contains one additional field for tracking constituent +sources.

+

Field Descriptions

+
NAME
+

Name of the output request. See comments above, and note that in this +case you should also avoid using the SOURCE_NAME in the output name.

+
RES_NAME
+

Name of reservoir in which output is requested.

+
NODE
+

Node number, if the request is for a flow to a particular connected +node. Otherwise, use none

+
VARIABLE
+

Model variable to be output. In HYDRO, you can request stage,flow,vel. +In QUAL you can request stage,flow or the name of any constituent in the +model. When no output request is made for a constituent that is not +required for reaction kinetics, it is not calculated.

+
SOURCE_GROUP
+

Name of the source group that is being tracked in this output request. +To learn how to define a group, see group reference. The group used must +consist entirely of boundary or source locations -- not water bodies.

+
INTERVAL
+

Time Interval of the output. Can be any DSS-compliant interval with a +unit that is not calendar dependent (MIN, HOUR, DAY). This is a +departure from previous versions of DSM2, which offered monthly output.

+
PERIOD_OP
+

Period aggregation performed to convert the model time step into the +time interval of the output. May be INST or AVE, which produce +instantaneous values and period averages, respectively.

+
FILE
+

Name of the output file where the data will be stored. If the extension +.txt is given, the output is automatically in text format. If a .dss +extension is used, output is in HEC-DSS format.

+

Table Info

+
Identifier:
+

NAME, VARIABLE, SOURCE_GROUP

+
Parent Table:
+

Table is parent

+
Include Block:
+

OUTPUT_TIME_SERIES

+
+
    +
  • Finer output is preferred to daily. You can easily average to daily + later outside the model using a script or time series application. + Tidal data are poorly represented by daily aggregations, and + numerous incorrect conclusions have arisen from aliasing + (fluctuations over two weeks) when a 24 hour daily averaging + operation is imposed on a naturally 25-hour phenomenon. Monthly + output is no longer allowed.
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/PTM_Output_Files/index.html b/manual/reference/PTM_Output_Files/index.html new file mode 100644 index 00000000..6943ba86 --- /dev/null +++ b/manual/reference/PTM_Output_Files/index.html @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + PTM Output Files - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

PTM Output Files

+

PTM outputs a trace.out and animation.bin file in addition to the .dss +files. 

+

The animation binary file outputs in Java binary format the snapshot +location of all particles in the simulation. 

+

The trace output file only records the event (timestamp) when each +particle passes from one waterbody to another waterbody.

+

All indices are internal global index of grid. All times are in Julian +time.

+

Table. Content in Trace.out

+ + + + + + + + + + + + + + + + + + + + + + + + +


+

1st col

2nd col

3rd col

4th col

header row

start time

end time

time step

total particle number

content row

event time

particle id

node id particle passing

waterbody particle entering

+ +

trace.out is written by ParticleObserver, which is incorporated in each +particle, then read by flux class.

+

Time is in Julian minute.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Particle_Filter/index.html b/manual/reference/Particle_Filter/index.html new file mode 100644 index 00000000..8d23d28e --- /dev/null +++ b/manual/reference/Particle_Filter/index.html @@ -0,0 +1,602 @@ + + + + + + + + + + + + + + + + + + Particle Filter - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Particle Filter

+

Overview

+

Particle Filter is a section in the PTM input where you set up +particle-filters. Particle Filter is designed to modify the particle +flux at node, without changing hydrodynamic condition, by keeping +particles from entering the specified waterbody.

+

Tables

+

Example

+
PARTICLE_FILTER 
+NAME       NODE WATERBODY FILLIN FILE                 PATH 
+filter_hor 8    chan:54   last   constant             0 
+filter_nf  280  chan:357  last   ./Filter_OP_NF.dss   /HIST+FILTER/FILTER_NF/FILTER_OP//IR-DECADE/DWR-BDO/  
+END
+
+

This is for a normal filter, which is located at a node connecting to any +waterbody. The PARTICLE_FILTER table defines particle filters by giving +them names, associating them to a node and a waterbody, and setting up +the passing efficiency (which could be constant value, or time-varying +data in DSS).

+

Field Descriptions

+
NAME
+

Name assigned to the particle filter. This is the identifier of the +filter used elsewhere to refer to the filter.

+
NODE
+

The ID of the node to which the filter is attached.

+
AT_WB
+

The type and ID of the waterbody to which the filter is attached .

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model. See fillin types.

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if +you would like to assign a constant value to the input (the value will +be entered in the next column).**

+
PATH
+

The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +here. The stored variable is particle passing efficiency, a float value +between 0 ~ 1: 0-block; 1-totally pass.

+

Filter locates on 1 node, at the side of 1 waterbody (channel, +reservoir, source flow). Every filter is unique with 1 node & 1 +waterbody.

+

Filter is 2-directional, and is designed to function differently for +each direction \<1> waterbody->node: filter serves as a total block +with passing efficiency 0; \<2> node->waterbody: filter is designed +for changing particle decision-making, with passing efficiency as a +re-adjusting factor; thus it only functions when node is connected with +multiple waterbodies; i.e. it will not have any effect for a single +chain of channel

+

Currently there's no delay concept for filter, because the filter delay +rule is unknown, and our major concern is particles' fate (particle's +movement only depends on hydro conditions, not interacted with each +other)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Particle_Flux_Output/index.html b/manual/reference/Particle_Flux_Output/index.html new file mode 100644 index 00000000..617e4f9d --- /dev/null +++ b/manual/reference/Particle_Flux_Output/index.html @@ -0,0 +1,587 @@ + + + + + + + + + + + + + + + + + + Particle Flux Output - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Particle Flux Output

+

Overview:

+

PARTICLE_FLUX_OUTPUT is a section in the PTM text input that specifies +how the PTM records the number of particles in a group of water bodies +into a DSS output.

+

Example

+
FLUX_OUTPUT
+NAME         FROM_WB           TO_WB             INTERVAL FILENAME
+TWITCHELL    res:clifton_court group:swp         15MIN    ${PTMOUTPUTFILE}
+EMMATON      chan:216          group:cvp         15MIN    ${PTMOUTPUTFILE}
+DIVERSION_AG group:all         group:ag_div      15MIN    ${PTMOUTPUTFILE}
+END
+
+

Field Descriptions

+
NAME
+

This is the name that will go in the B_PART of the output.

+
FROM_WB
+

Name of the water body or group that is the "from" location of the flux.

+
TO_WB
+

Name of the water body or group that is the "to" destination in the +flux.

+
INTERVAL
+

Interval at which to record the flux.

+
FILENAME
+

The name of the output file. If the file extension is *.dss, output is +in DSS format. If the file extension is *.txt a text file output is +produced.

+

Table Info

+
Identifier:
+

NAME

+

Particle flux output can be in absolute number of particles or +percentage of injection. The option is set by PTM_FLUX_PERCENT in the +SCALAR section.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Particle_Group_Output/index.html b/manual/reference/Particle_Group_Output/index.html new file mode 100644 index 00000000..3615fd99 --- /dev/null +++ b/manual/reference/Particle_Group_Output/index.html @@ -0,0 +1,560 @@ + + + + + + + + + + + + + + + + + + Particle Group Output - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Particle Group Output

+

Overview:

+

PARTICLE_GROUP_OUTPUT is a section in the PTM input that specifies DSS +output to record residence of particles in group of water bodies.

+

Example

+
PARTICLE_GROUP_OUTPUT 
+NAME       GROUP_NAME INTERVAL   FILENAME 
+TWITCHELL  twitchell  1HOUR      ${PTMOUTPUTFILE} 
+EMMATON    emmaton    1HOUR      ${PTMOUTPUTFILE} 
+END
+
+

Field Descriptions

+
NAME
+

This is the output name that will go in the B_PART of the output.

+
GROUP_NAME
+

Name of the group defined in GROUP.

+
INTERVAL
+

Interval at which to record residence.

+
FILENAME
+

The name of the output file. If the file extension is *.dss, output is +in DSS format. If the file extension is *.txt a text file output is +produced.

+

Table Info

+
Identifier:
+

NAME

+

Similar as particle flux output.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Particle_Insertion/index.html b/manual/reference/Particle_Insertion/index.html new file mode 100644 index 00000000..b430dd08 --- /dev/null +++ b/manual/reference/Particle_Insertion/index.html @@ -0,0 +1,565 @@ + + + + + + + + + + + + + + + + + + Particle Insertion - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Particle Insertion

+

Overview

+

Particle Insertion is a section in the PTM input that specifies the insertion of +particles in water bodies over time. The PTM can insert multiple sets of +particles.

+

Tables

+

Example

+
PARTICLE_INSERTION 
+NODE  NPARTS   DELAY  DURATION     
+1     1000     0hour  1day     
+13    1000     1day   0hour     
+END
+
+

The Rate Coefficient Table lists reaction rate coefficients for +non-conservative constituents. Different rates can be assigned to +different water bodies. The assignment is done using groups -- first you +define a +group and +then you assign rate coefficients to the group.

+

Field Descriptions

+
NODE
+

The node at which the insertion is made.

+
NPARTS
+

Number of particles.

+
DELAY
+

Delay before the first insertion after the beginning of the PTM runs. +The unit of time needs to be attached without spaces.

+
DURATION
+

Interval over which insertion is evenly distributed in time. If the time +is set as zero, all the particles are inserted instantaneously. The unit +of time needs to be attached without spaces.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Particle_Reservoir_Filter/index.html b/manual/reference/Particle_Reservoir_Filter/index.html new file mode 100644 index 00000000..57d7c92d --- /dev/null +++ b/manual/reference/Particle_Reservoir_Filter/index.html @@ -0,0 +1,589 @@ + + + + + + + + + + + + + + + + + + Particle Reservoir Filter - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Particle Reservoir Filter

+

Overview

+

Particle Reservoir Filter is a section in the PTM input where you set +up particle-filters. Particle Reservoir Filter is designed to modify the +particle flux at reservoir, without changing hydrodynamic condition, by +keeping particles from entering the specified waterbody.

+

Tables

+

Example

+
PARTICLE_RES_FILTER 
+NAME          RES_NAME      WATERBODY          FILLIN FILE           PATH 
+clfc_div_bbid clifton_court qext:dicu_div_bbid last   ./filterOp.dss /HIST+FILTER/CLFC_DIV/FILTER_OP//IR-DECADE/DWR-BDO/  
+END
+
+

This is a special filter, which is located at a reservoir directly +connecting to a source flow. The PARTICLE_RES_FILTER table defines +particle filters by giving them names, associating them to a reservoir +and one of its directly connected waterbodies, and setting up the passing +efficiency (which could be constant value, or time-varying data in +DSS).

+

Field Descriptions

+
NAME
+

Name assigned to the particle filter. This is the identifier of the +filter used elsewhere to refer to the filter.

+
RES_NAME
+

The name of the reservoir to which the filter is applied.

+
AT_WB
+

The type and ID of the waterbody to which the filter is attached .

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model. See fillin types.

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if +you would like to assign a constant value to the input (the value will +be entered in the next column).**

+
PATH
+

The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +here. The stored variable is particle passing efficiency, a float value +between 0 ~ 1: 0-block; 1-totally pass. **

+

Similar as Particle Filter

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Rate_Coefficients/index.html b/manual/reference/Rate_Coefficients/index.html new file mode 100644 index 00000000..a41796a9 --- /dev/null +++ b/manual/reference/Rate_Coefficients/index.html @@ -0,0 +1,601 @@ + + + + + + + + + + + + + + + + + + Rate Coefficients - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Rate Coefficients

+

Overview

+

Rate Coefficients are reaction and growth rates assigned to +non-conservative constituents. This table assigns the rates to groups of +water bodies (there usually aren't enough data to support individual +assignments).

+

Tables

+

Example

+
# sample algae rate coefficients in a channel group
+RATE_COEFFICIENT
+GROUP_NAME  CONSTITUENT  VARIABLE  VALUE 
+chan_10_15                   algae       alg_die    0.2 
+chan_10_15                   algae       alg_grow   1.5 
+chan_10_15                   algae       alg_resp  0.15 
+chan_10_15                   algae       settle     0.2 
+END
+
+

The Rate Coefficient Table lists reaction rate coefficients for +non-conservative constituents. Different rates can be assigned to +different water bodies. The assignment is done using groups -- first you +define a +group and +then you assign rate coefficients to the group.

+

Field Descriptions

+
GROUP_NAME
+

Name of the group to which the coefficient entry is assigned.

+
CONSTITUENT
+

Non-conservative constituent with which coefficient is associated.

+
VARIABLE
+

Physical process governed by coefficient.

+
VALUE
+

Value assigned to the coefficient

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

QUAL_SPATIAL

+

Assignments on higher layers supersede assignments on lower layers, even +if the patterns that cause the assignment are not the same.

+

All channels must have rate coefficients for non-conservative DO runs.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Reservoir/index.html b/manual/reference/Reservoir/index.html new file mode 100644 index 00000000..04c87695 --- /dev/null +++ b/manual/reference/Reservoir/index.html @@ -0,0 +1,875 @@ + + + + + + + + + + + + + + + + + + Reservoir - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Reservoir

+

Overview

+

Reservoirs are open bodies of water that store flow and are connected +to nodes by means of an energy-based equation. Reservoirs are considered +instantly well-mixed.

+
    +
  • The Reservoirs Table specifies the identity and physical properties + of the reservoir.
  • +
  • Connections to nodes are specified in the Reservoir Connections + table. 
  • +
  • Reservoir area as a function of elevation is specified in Reservoir + volume table, while volume is calculated in code (since 8.2)
  • +
+

RESERVOIR Table

+

A sample is given below

+

Example

+
# Description:
+# Setting of Clifton Court Forebay
+RESERVOIR
+NAME  AREA  BOT_ELEV   
+clifton_court       91.868000   -7.748      
+END
+
+

The RESERVOIR table defines the name and physical properties of the +reservoir. In the case of a "tank" like reservoir the area and volume +are simply defined by the constant area and bottom elevation times the +constant area, respectively.

+

Field Descriptions

+
NAME
+

Name of the reservoir. This is the identifier of the reservoir used in +other tables.

+
AREA
+

Surface area (in units of million sq ft) of the reservoir at typical +depth. This area is used to calculate volume changes.

+
BOT_ELEV
+

Elevation (ft) of the bottom of the reservoir.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

GRID

+

RESERVOIR_CONNECTION Table

+

See sample below

+

Example

+
# Description:
+# Setting of Frank Tract Connections
+RESERVOIR_CONNECTION
+
+RES_NAME  NODE  COEF_IN  COEF_OUT   
+franks_tract        103   2250.000  2250.000     
+franks_tract        216   1500.000  1500.000    
+END
+
+

The RESERVOIR_CONNECTION table lists reservoir connections to +neighboring nodes. Flow through reservoir connections is calculated +using the following formula

+

Q = Cto sqrt[ 2g(znode - zres) ] ... +zres < znode

+

Q = Cfrom sqrt[ 2g(zres - znode) ] +... zres > znode

+

Where:

+
    +
  • Cto and Cfrom are coefficients representing + the hydraulic efficiency of the reservoir connection and the nominal + Area perpendicular to flow.
  • +
  • g is gravity and
  • +
  • zres and znode are the water surface + elevations at the reservoir and node (node surface is assessed by + means of a reference channel that has no reservoirs attached to it).
  • +
+

Field Descriptions

+
RES_NAME
+

Name of reservoir at which connection is specified.

+
NODE
+

Number identifying the node at which connection is specified.

+
COEF_IN
+

Coefficient from node to reservoir, greater than zero. If you compare +the reservoir equation to the gate or other orifice equation you will +find that the reservoir coefficient actually folds several quantities +into one parameter: a flow efficiency (between zero and one) and an area +of flow. If you have an observation of the area normal to flow, the +coefficient should be some fraction of this aperture.

+
COEF_OUT
+

Coefficient from reservoir to node, greater than zero, applied when the +flow direction is out of the reservoir.

+

Table Info

+
Identifier:
+

RES_NAME, NODE

+
Parent Table:
+

RESERVOIR

+
Parent Identifier:
+

RES_NAME

+
Include Block:
+

GRID

+

A node may not have more than three reservoir connections and must have +at least one ungated channel connection.

+

RESERVOIR_VOL Table

+

See sample below

+
RESERVOIR_VOL
+RES_NAME                 ELEV          AREA
+liberty                -61.975          0.000
+liberty                -32.808          2.478
+liberty                -16.404         16.220
+liberty                 -3.281        272.328
+liberty                 -1.640       1017.270
+liberty                  0.000       1999.522
+liberty                  1.640       3031.999
+liberty                  3.281       4209.851
+liberty                  4.921       4584.028
+liberty                  6.562       5190.456
+liberty                  8.202       6359.679
+liberty                  9.843       6636.050
+liberty                 13.123       6731.118
+liberty                 16.404       6830.894
+liberty                 19.685       6876.916
+liberty                 22.966       6890.138
+END
+
+

Since version 8.2, reservoirs can also have variable area and volume +defined as a function of elevation. This table still requires the +reservoir to be defined in the RESERVOIR table even though the elevation +area specified in the RESERVOIR table will be ignored if it is specified +here.

+

Field Descriptions

+
RES_NAME
+

Name of the reservoir. This is the identifier of the reservoir should +have been specified in the RESERVOIR table (Elevation and area from that +table are ignored)

+
ELEV
+

Elevation (ft) of the reservoir at which the area and volume are +specified. This elevation is to the datum of the rest of model +(currently NAVD88)

+
AREA
+

Surface area (in acres) of the reservoir at specified elevation. The +area is interpolated between elevations based on the current elevation +of the water level

+

(Reference: Annual Report 2015, Chapter 2). See Figure below for a +snippet

+
VOLUME
+

Volume (in units of acre-ft) of the reservoir at specified elevation. +The volume is calculated as explained here.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

GRID

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Reservoir_Concentration/index.html b/manual/reference/Reservoir_Concentration/index.html new file mode 100644 index 00000000..7b9b9646 --- /dev/null +++ b/manual/reference/Reservoir_Concentration/index.html @@ -0,0 +1,622 @@ + + + + + + + + + + + + + + + + + + Reservoir Concentration - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Reservoir Concentration

+

Overview

+

Reservoir concentration represents the concentration of constituents +attached to reservoir sources.

+

Tables

+

Example

+
# Description:
+# BBID EC concentration in Clifton Court Forebay
+RESERVOIR_CONCENTRATION
+NAME            RES_NAME      VARIABLE FILLIN FILE                                          PATH   
+dicu_drain_bbid clifton_court ec       last   ../../timeseries/dicuwq_3vals_extended.dss    /DICU-HIST+RSVR/BBID/DRAIN-EC//1MON/DWR-BDO/           
+END
+
+

The RESERVOIR_CONCENTRATION table attaches concentrations to boundary +and source flows defined in QUAL. The table also assigns a time series +to the source.

+

Field Descriptions

+
NAME
+

Name assigned to the source. An entry here must have the same name as an +entry in the BOUNDARY_STAGE, BOUNDARY_FLOW or SOURCE_FLOW tables -- by +matching names you will attach concentrations to the flow.

+
RES_NAME
+

Name of the reservoir where the flow is applied. This must match the +reservoir name given in the original flow table (it is a bit redundant, +but easier to look things up).

+
VARIABLE
+

Constituent name. If no output is requested for the constituent +currently it will be ignored.

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model.

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if +you would like to assign a constant value to the input (the value will +be entered in the next column).

+
PATH
+

The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +(e.g. 4.22) here.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

QUAL_TIME_SERIES

+

Multiple sources and sinks can be assigned to a reservoir. They are +often kept separate in order to assign different concentrations to them.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Reservoir_Initial_Condition/index.html b/manual/reference/Reservoir_Initial_Condition/index.html new file mode 100644 index 00000000..e88ebbcb --- /dev/null +++ b/manual/reference/Reservoir_Initial_Condition/index.html @@ -0,0 +1,589 @@ + + + + + + + + + + + + + + + + + + Reservoir Initial Condition - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Reservoir Initial Condition

+

Overview

+

HYDRO requires a water surface initial condition at reservoirs. The +Reservoir IC view allows the user to specify default initial conditions. +The default initial condition is required, but will be overridden if a +restart file is used.

+

Tables

+

Example

+
# Description:
+# Initial Condition of Clifton Court Forebay
+RESERVOIR_IC 
+RES_NAME          STAGE   
+clifton_court     5.000          
+END
+
+

The RESERVOIR_IC table assigns a default initial water surface elevation +to each reservoir. This value is used at the start of the run unless it +is superseded by a restart file.

+

Field Descriptions

+
RES_NAME
+

Name of reservoir where initial condition is to be applied.

+
STAGE
+

Initial water surface elevation.

+

Table Info

+
Identifier:
+

NAME

+
Include Block:
+

INITIAL_CONDITION

+

Default initial values are replaced if a restart file is used.

+

Currently, QUAL cannot take a spatially distributed default initial +condition for constituent concentrations. It has only a single scalar.

+

Consistency should be maintained between initial reservoir-channel stage +differences and flows at nodes. If the reservoir is assigned a different +initial stage than surrounding channels, the head difference implies a +flow described by the reservoir equations (see Reservoir View). Unless +you are very careful to balance the implied flow with other channel +flows, the initial time step will have a mass imbalance. If what you +want is an easy initial condition, try making stage in the reservoir +equal to the stage in all the surrounding channels and making the +initial flow zero. Alternatively, you can put up with the imbalance -- +just run HYDRO for an hour or so extra before the start of any QUAL run +you want to do.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Scalar/index.html b/manual/reference/Scalar/index.html new file mode 100644 index 00000000..aa2d1ab3 --- /dev/null +++ b/manual/reference/Scalar/index.html @@ -0,0 +1,590 @@ + + + + + + + + + + + + + + + + + + Scalar - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Scalar

+

Overview

+

Scalars are scalar model variables used to specify model-wide +numerical properties and echoed output levels. They are the equivalent +of the text input SCALAR section. All of the parameters are interpreted +as text and can be replaced by ENVVARS.

+

Tables

+

Example

+
SCALAR 
+NAME  VALUE   
+binary_output                       false     
+checkdata                           false     
+cont_bad                            false     
+cont_missing                        true     
+END
+
+

The SCALAR table comprises name-value pairs for scalars. The scalars +that are allowed depend on the specific model.

+

Field Descriptions

+
NAME
+

Name of the parameter. This is the identifier of the parameter.

+
VALUE
+

Value assigned to the parameter. These are interpreted by the model +first as text (to allow substitution using ENVVARS) and then converted +to the correct data type and validated. For boolean (true/false) one +letter is sufficient.

+

Table Info

+
Identifier:
+

NAME

+
Parent Table:
+

Table is parent

+
Include Block:
+

PARAMETER

+

Generally you will work with the standard parameters distributed with +DSM2. You always have to provide RUN_START_DATE, RUN_END_DATE as the +defaults are deliberately designed to halt the model.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Source_Flow/index.html b/manual/reference/Source_Flow/index.html new file mode 100644 index 00000000..d15f9fe2 --- /dev/null +++ b/manual/reference/Source_Flow/index.html @@ -0,0 +1,621 @@ + + + + + + + + + + + + + + + + + + Source Flow - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Source Flow

+

Overview

+

Source flows represent inflows and outflows in the interior of the model +domain at nodes. An entry here creates a source and assigns a time +series of in/outflows to it.

+

Tables

+

Example

+
# Description:
+# Historical source flow at Tracy Pump
+SOURCE_FLOW
+NAME      NODE SIGN FILLIN FILE                 PATH                                               
+cvp       181   -1  last   ${BNDRYINPUT}        /FILL+CHAN/CHDMC004/FLOW-EXPORT//1DAY/${HISTFLOWVERSION}/    
+END
+
+

The node SOURCE_FLOW table defines sources and sinks by giving them +names and associating them to a node. The table also assigns a time +series to the source.

+

Field Descriptions

+
NAME
+

Name assigned to the source. This is the identifier of the boundary and +is referred to elsewhere in the input system. If you assign water +quality you will use the same name in order to match concentration to +flow.

+
NODE
+

Node number at which the source is applied.

+
SIGN
+

Forces the time series to be a source or a sink. Positive values are +normally associated with a source, but the data (especially sinks such +as agricultural diversions) are sometimes measured in absolute flow. Use +1 to force the value to be a positive source or -1 to interpret values +as a sink.

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model. See fillin types

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if you +would like to assign a constant value to the input (the value will be +entered in the next column).

+
PATH
+

The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +(e.g. 4.22) here.

+

Table Info

+
Identifier:
+

NAME

+
    +
  • Multiple sources and sinks can be assigned to a node. They are + usually kept separate in order to assign different concentrations to + them.
  • +
  • HYDRO is able to accept sources and sinks at boundary nodes, but + this is not good modeling practice. Use them on the interior.
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Source_Flow_Reservoir/index.html b/manual/reference/Source_Flow_Reservoir/index.html new file mode 100644 index 00000000..00d304d0 --- /dev/null +++ b/manual/reference/Source_Flow_Reservoir/index.html @@ -0,0 +1,617 @@ + + + + + + + + + + + + + + + + + + Source Flow Reservoir - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Source Flow Reservoir

+

Overview

+

Reservoir source flows represent mass inflows and outflows in the +interior  of the model domain at reservoirs. An entry here creates a +source and assigns a time series of in/outflows to it.

+

Tables

+

Example

+
# Description:
+# Historical source flow at Bank Pump
+SOURCE_FLOW_RESERVOIR
+NAME RES_NAME      SIGN FILLIN FILE                               PATH  
+swp  clifton_court -1   last   ../../timeseries/hist_19902012.dss /FILL+CHAN/CHSWP003/FLOW-EXPORT//1DAY/DWR-DMS-201203/    
+END
+
+

The SOURCE_FLOW_RESERVOIR table defines sources and sinks by giving them +names and associating them to a reservoir. The table also assigns a time +series to the source.

+

Field Descriptions

+
NAME
+

Name assigned to the source. This is the identifier of the boundary and +is referred to elsewhere in the input system. If you assign water +quality you will use the same name in order to match concentration to +flow.

+
RES_NAME
+

Name of reservoir at which the source is applied.

+
SIGN
+

Forces the time series to be a source or a sink. Positive values are +normally associated with a source, but the data (especially sinks such +as agricultural diversions) are sometimes measured in absolute flow. Use +1 to force the value to be a positive source or -1 to interpret values +as a sink.

+
FILLIN
+

Method for filling in data if the time step of the assigned series is +coarser than the time step of the model. See fillin types

+
FILE
+

DSS or text file in which data are stored. Use consistent case when +referring to the same file. You may also enter the word constant if you +would like to assign a constant value to the input (the value will be +entered in the next column).

+
PATH
+

The path within the text or DSS file of the time series data. If you +used the constant keyword in the Input File column, enter the value +(e.g. 4.22) here.

+

Table Info

+
Identifier:
+

NAME

+

Multiple sources and sinks can be assigned to a reservoir. They are +usually kept separate in order to assign different concentrations to +them.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Source_Tracking/index.html b/manual/reference/Source_Tracking/index.html new file mode 100644 index 00000000..914ebd99 --- /dev/null +++ b/manual/reference/Source_Tracking/index.html @@ -0,0 +1,495 @@ + + + + + + + + + + + + + + + + + + Source Tracking - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Tidefile/index.html b/manual/reference/Tidefile/index.html new file mode 100644 index 00000000..faec9bff --- /dev/null +++ b/manual/reference/Tidefile/index.html @@ -0,0 +1,620 @@ + + + + + + + + + + + + + + + + + + Tidefile - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Tidefile

+

Overview

+

The tidefile is the HDF5-formatted binary file used to pass flow and +geometry data from HYDRO to QUAL and PTM. The tidefile is specified as +output by HYDRO in the +IO_FILE +table. It is specified as input to QUAL and PTM in the TIDEFILE section. +Input tidefiles can be specified only in text.

+

Tidefiles can be stacked if desired, but this is an old feature that is +now deprecated. Stacking means that the flow simulation can be divided +temporally among several HYDRO runs and then the resulting tidefiles +used sequentially in QUAL or PTM.

+

Tables

+

Example

+
TIDEFILE     
+START_DATE END_DATE FILENAME   
+runtime    length   ${HYDROTIDEFILE} # begin run to 20JUL  
+END 
+
+

The following example uses one tidefile with an environmental variable +for the file name. This is the most common treatment.

+

Example

+
TIDEFILE     
+
+
+
+
+START_DATE  END_DATE   FILENAME   
+runtime     20JUL1996  hist1.h5      # beginning of run to 20JUL  
+20JUL1996   24JUL1996  hist2.h5   
+last        length     hist3.h5      # end of previous to end of run  
+01SEP1996   length     ficticious.h5 # no error: will never be opened   
+END 
+
+

This example uses several tidefiles tiled together to cover a longer +period. Please let us know if you need this functionality, as it is a +holdover from the old "repeating tide" days and will probably be +deprecated.

+

Field Descriptions

+
START_DATE
+

When to start using the tidefile. Tidefiles must be listed in temporal +order. The START_DATE of the first tidefile must fall on or before the +start of the run. The START_DATE of subsequent tidefiles must exactly +coincide with the END_DATES of preceding tidefiles. There is no +associated "TIME" part -- tidefiles must be matched on calendar days. +If a START_DATE is not given or is listed as "none", the timestamp in +the tidefile will be used for the start. There are some special +keywords that can be used with START_DATE:

+
    +
  • runtime: start time in tidefile
  • +
  • last: use this tidefile when the previous tidefile ends
  • +
  • none: use default.
  • +
+
END_DATE
+

When to stop using the tidefile. If not given, the tidefile is used +until it ends. The END_DATE of the last tidefile must overlap the +runtime of the simulation. Note that this can be a little tricky because +the ending time is the time 0000 of the END_DATE, so you may need +another day. You can avoid this sort of problem by specifying your run +dates with standard times (0000 instead of military 2400). There are +some special keywords that can be used with END_DATE:

+
    +
  • length: use all of the tidefile, up until its end
  • +
  • none: use default.
  • +
+
FILENAME
+

Name of the file. Use upper/lower case consistently because filenames +are case sensitive.

+

Table Info

+
Identifier:
+

FILENAME

+

ENVVARs are often used for names of files, DSS paths, parameters that +are varied over a study -- the substitution will occur at runtime.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/Transfer/index.html b/manual/reference/Transfer/index.html new file mode 100644 index 00000000..37655721 --- /dev/null +++ b/manual/reference/Transfer/index.html @@ -0,0 +1,579 @@ + + + + + + + + + + + + + + + + + + Transfer - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Transfer

+

Overview

+

Transfers are direct water connections from a reservoir or node to +another reservoir or node. Transfers are instantaneous movements of +water (and its constituents and particles) without any detailed +description of physics or storage. The Transfer View specifies the +connectivity of the transfer. A time series must also be listed in the +Transfer Time Series View to specify the flow -- the default is zero.

+

Tables

+

Example

+
# Description:
+# Sample transfer from a reservoir to a node
+
+TRANSFER 
+NAME       FROM_OBJ  FROM_IDENTIFIER TO_OBJ TO_IDENTIFIER 
+transfer_1 reservoir res_1           node   6  
+END
+
+

The Transfer table defines the name and connectivity of the transfer. +The flow is a time series input specified in +TRANSFER_TIME_SERIES

+

Field Descriptions

+
NAME
+

Name of the transfer. This is the identifier of the transfer used in +other GUI views.

+
FROM_OBJ
+

Type (node or reservoir) of the source object.

+
FROM_IDENTIFIER
+

Identifier (node number or reservoir name) of the source destination +object.

+
TO_OBJ
+

Type (node or reservoir) of the destination object.

+
TO_IDENTIFIER
+

Identifier (node number or reservoir name) of the destination object.

+

In previous versions of DSM2, Transfers were called "obj2obj".

+

To complete the specification of a Transfer, a time series or constant +flow must be attached to it in the Transfer Time Series table.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/manual/reference/index.html b/manual/reference/index.html new file mode 100644 index 00000000..2d9b3dc6 --- /dev/null +++ b/manual/reference/index.html @@ -0,0 +1,501 @@ + + + + + + + + + + + + + + + + + + Input Tables - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + + + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/overview/index.html b/overview/index.html new file mode 100644 index 00000000..88b90896 --- /dev/null +++ b/overview/index.html @@ -0,0 +1,523 @@ + + + + + + + + + + + + + + + + + + Documentation - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Documentation

+

Overview:

+

Welcome to DSM2. This documentation is an ever-developing effort to get +you started using the model. In learning DSM2 you will want to make use +of the following resources

+
    +
  • The tutorials, which will get you used to the user interface and + running the model. There are two major groups of tutorials: one + builds up a simplified model from the channels to boundaries to + operating rules. Its instructions are very explicit. The second + group of how-to tutorials focuses on large practical problems, + examples of real studies on the Sacramento-San Joaquin Delta.
  • +
  • The documentation. This documentation explains how to use the + graphical and text input of the model.
  • +
  • Fellow modelers and water managers.
  • +
+

Data management

+

Input data for DSM2 is managed using a combination of a relational +database with a graphical interface for tabular input, HEC-DSS for time +series storage and a small amount of text inputs for frequently-changing +components of the model.

+

There is a section in this documentation for each input table. There is +also a section for each type of text input. The two do not overlap, +except for scalars (text is a last minute opportunity to change a +scalar) and output (you can add output in text).

+

User Interface

+

The DSM2 database interface is shown below. Important nomenclature is +identified -- the terms in this figure are used frequently in this +documentation.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Background_Material_and_References/index.html b/reference/Background_Material_and_References/index.html new file mode 100644 index 00000000..e046aabe --- /dev/null +++ b/reference/Background_Material_and_References/index.html @@ -0,0 +1,565 @@ + + + + + + + + + + + + + + + + + + + + + + Reference - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Background Material and References

+

DSM2 Hydro is based on the FourPt computer program written by Lew DeLong in USGS. The original documentation for FourPt is +available here. DSM2 adds improvements and enhancements to the FourPt model, including +an improved input and output system.

+

DSM2 Qual is roughly based on QUAL-2E + and the Branched Lagrangian +Transport Model (BLTM) written by Harvey Jobson of USGS. 

+

Download DSM2 Versions

+

DSM2 has had many versions over the past 30 years. Some of the recent ones are available here

+

Downloads of DSM2 Versions

+

References:

+

QUAL2E +Documentation - Basis for QUAL Nonconservative Constituent +Kinetics.pdf
+ +D1641rev.pdf
+ +EC_chloride_bromide_05_29_01.pdf
+ Delta +D1641 Water Quality Standards Full +Reference.pdf
+ +BLTMenhancements-USGSWRI97_4050.pdf
+ Four +Point memo from USGS-basis for DSM2 +HYDRO.pdf

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Background_Slide_Material/index.html b/reference/Background_Slide_Material/index.html new file mode 100644 index 00000000..448a6a81 --- /dev/null +++ b/reference/Background_Slide_Material/index.html @@ -0,0 +1,510 @@ + + + + + + + + + + + + + + + + + + Background Slide Material - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + + + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/CALSIM_-_DSM2_Integration/index.html b/reference/CALSIM_-_DSM2_Integration/index.html new file mode 100644 index 00000000..9ab8128a --- /dev/null +++ b/reference/CALSIM_-_DSM2_Integration/index.html @@ -0,0 +1,625 @@ + + + + + + + + + + + + + + + + + + CALSIM - DSM2 Integration - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + +

CALSIM - DSM2 Integration

+

Background

+

CALSIM is a water operations simulation model. It meets demands using +reservoir release operations and other operational criteria. A crucial +operational criterion is meeting the salinity and X2 standards in the +Delta.

+

CALSIM relies on DSM2 simulation of water quality standards. However +DSM2 is computationally expensive to run in repeated scenarios needed by +CALSIM. CALSIM relies on a linear programming approach and needs flow +salinity relationships to estimate the flow needed to meet a particular +water quality standard. Furthermore CALSIM is a monthly model and needs +to make assumptions pertaining to that limitation. 

+

Artificial Neural Networks (ANNs)

+

To make it computationally feasible, the flow salinity relationships are +derived from DSM2 simulations with perturbations of inputs that are of +concern to CALSIM. This flow relationship information is used as +training data for Artificial Neural Networks (ANNs); more specifically +Feed-forward Neural Networks (FNNs).  These ANNs then are surrogate +models for DSM2 and are supposed to represent the impact of operations +on X2 and salinity standards.

+

Full circle analysis

+

To verify the results derived from having a surrogate DSM2 (ANN) model +in CALSIM, the CALSIM flows and gate conditions are converted into daily +inputs (with assumptions for monthly to daily) for DSM2 and the output +salinity is checked against the X2 or salinity standards in CALSIM. This +is called a "full circle analysis".  Typically these have been done for +a select period of 16 years but can be extended to the entire period of +82 years of simulation if desired.

+

DSM2 boundary conditions

+

DSM2 needs flow and stage boundary conditions, i.e. the inputs at the +edges of the domain that would drive the simulation.

+
    +
  1. Flow boundaries: CALSIM operates the reservoirs upstream of the + Delta and as a result the flow conditions are established by CALSIM + simulations, though on a monthly time step resolution.
  2. +
  3. Gate positions:  CALSIM operates these to satisfy regulations and + other constraints. 
  4. +
  5. Stage boundary: The only one is the ocean boundary at Martinez that + is derived from astronomical stage at San Francisco with regression + using historical data to transfer to Martinez (Planning tide + generator)
  6. +
  7. Martinez EC boundary: This is derived from a flow salinity + relationship based on G model and stage boundaries (Planning + Martinez EC generator)
  8. +
  9. Vernalis EC boundary: Derived from flow regression equations.
  10. +
  11. Consumptive Use: These are represented in DSM2 at 258 nodes, CALSIM + does not directly simulate these, however they are provided as input + to CALSIM based on consumptive use models
  12. +
  13. Agricultural Drain EC: These are the most uncertain of the boundary + conditions and are represented in DSM2 as annually repeating values.
  14. +
  15. Waste water treatment plants ??
  16. +
+

Implementation

+

These boundary conditions are explicitly mapped in this document between +the CALSIM and DSM2 schematics. Schematics and Boundaries

+

Resolving Monthly - Daily conversions

+

CALSIM is a monthly time step model and DSM2 runs on 15 min or lower +time steps. The input data for CALSIM is monthly averaged i.e. a single +value for the entire month. DSM2 typically takes daily input values and +is also capable of hourly or sub hourly resolved values.  This mismatch +has to be resolved when doing this integration.

+

For daily to monthly conversions, it is simply a monthly averaging +technique. For certain quantities, such as gate positions, a count of +values may be computed ?

+

For monthly to daily conversions, there is a huge impedance mismatch. This means a +lot of information that is lost has to be either estimated or left as +the same value repeated over the days of the month. This is usually the +case for the flows, except that for stability reasons ( hydrodynamic +models ) the transition days between months employ a volume conserving +spline to smooth the transition. 

+
    +
  • Discuss daily variation issue here
  • +
+

Version Control

+

CALSIM and DSM2 have different versions, evolving at different rates for +different needs. As a result it is important to manage these versions and +the mappings between them. Draft_CALSIMII_DCU_Modification_081809

+
    +
  • What if CALSIM schematic changes?  Implication for the integration + above?
  • +
+

Notes

+

Martinez stage has been adjusted a little bit on 24DEC1967 to overcome a +dry-up breakdown at channel 201. The correction resides in a +timeseries ${DSM2}\timeseries\Planning_Tide_82years.dss. Planning study +users should add it to replace the regular timeseries.

+

Attachments:

+

+bat_prep.png (image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Checklists/index.html b/reference/Checklists/index.html new file mode 100644 index 00000000..57dd8ef1 --- /dev/null +++ b/reference/Checklists/index.html @@ -0,0 +1,509 @@ + + + + + + + + + + + + + + + + + + Checklists - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Checklists

+

Checklists are a great way to codify quality checks to avoid obvious and +typical errors for a task. 

+

Historical simulations

+

Bob Suits put together a checklist used for +historical simulation updates. 

+

Historical Simulation Checklist

+

Planning simulations

+
    +
  • Need a similar checklist for planning runs Yu (Joey) Zhou ? 
  • +
+

Attachments:

+

+verifying_historical_simulation_101620.pdf +(application/pdf)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/DSM2_Cloud_Setup/index.html b/reference/DSM2_Cloud_Setup/index.html new file mode 100644 index 00000000..23fc94a3 --- /dev/null +++ b/reference/DSM2_Cloud_Setup/index.html @@ -0,0 +1,486 @@ + + + + + + + + + + + + + + + + + + DSM2 Cloud Setup - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 Cloud Setup

+

DSM2 has been compiled on Linux and tested against the Windows results. +The output from hydro and qual is similar though not exact (floating +point level differences)

+

AWS (Amazon Web Services) Cloud services have been used to run DSM2 +using AWS Linux AMI (Amazon Machine Image). This requires a user to +start a Linux VM and then download and run DSM2 on that VM.

+

Docker install on AWS +Linux

+

A serverless approach to this would be that the user submits a batch job +consisting of a specification of what container (Docker) to be used and +a zip file with the inputs. The batch job is then run on a suitable +machine and the resulting output file is zipped and uploaded to S3 (AWS +Simple Storage System)

+

The serverless approach allows for submission of multiple concurrent +jobs that provide the ability to do many parallel runs at the same time. +The charges are on a per-second basis, making efficient use of computing +resources.

+

The use of the cloud to run a batch DSM2 PTM is here: How to Run a DCP +PTM Batch Job on +AWS

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Historical_Simulation_Checklist/index.html b/reference/Historical_Simulation_Checklist/index.html new file mode 100644 index 00000000..016f6f18 --- /dev/null +++ b/reference/Historical_Simulation_Checklist/index.html @@ -0,0 +1,949 @@ + + + + + + + + + + + + + + + + + + Historical Simulation Checklist - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + +

Historical Simulation Checklist

+

Verifying an Extension of the Historical Simulation

+

Bob Suits 10/16/2020

+

Verify Input Hydrology

+

Check Boundary Conditions

+
    +
  • Sacramento River inflow
  • +
  • San Joaquin River Inflow
  • +
  • Sacramento River + Yolo Bypass Inflow
  • +
  • Banks pumping
  • +
  • Jones pumping
  • +
+

Get Observed Data

+

Preferably get the Sacramento and San Joaquin River inflows and Banks +and Jones pumping from DAYFLOW. If DAYFLOW isn’t complete, get remainder +daily average flow from CDEC. It needs to be independent of the DSM2 +set-up.

+

Get reported flow at SRV (Rio Vista) and generate daily average flow.

+

Generate daily average flow from DSM2 simulation at:

+

VCU, ORI, OH4, OBD, GLC, RSAC101, RSAN115, RSAC155

+

Compare daily average observed flows to DSM2-simulated flows at boundaries

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FlowCDEC StationsOperating AgencySimulated
Sac River InflowFPTUSGSRSAC155
SJR Inflow
+

+
RSAN112
SRV
+

+
RSAC101
Banks Pumping
+

+
VCU + ORI – OH4
Jones Pumping
+

+
OBD + GLC – ORI
Banks + Jones
+

+
VCU + OBD + GLC – OH4
+ +

Verify timing of installation and removal of temporary barriers and operation of Montezuma Control Structure and Delta Cross Channel Gates

+
    +
  1. +

    Create a dss file with the observed and simulated 15-minute data. + Compare observed and simulated stages just upstream and downstream + of each barrier site. This would already have been done with + observed data in establishing the timings by looking at observed + stages. Now repeat the analysis in order to confirm that you got the + operation timing correct.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    BarrierStations to use to check
     Middle River​MUP and (MAB or MTB)
    Grant Line CanalGCT and GLE
    Old RiverOBD and (OAD or ODM)
    Old River at HeadOH1 and SJL
    +
  2. +
  3. +

    Compare internal daily average flows affected by gate operations

    + + + + + + + + + + + + + + + + + + + + + +
    ObservedSimulated
    DLCDLC Delta Cross Channel
    NSLSLMZU025 Montezuma Slough at National Steel
    +
  4. +
  5. +

    Check key internal flows for overall circulation of Delta waters

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    ObservedSimulated
    GSSGSS
    TRNTRN
    OBIOBI (ROLD024)
    MDM Subtract RMID015-145 from RMID015-144
    OH4OH4 (ROLD034)
    VCUVCU
    OH1OH1
    OLDOLD
    GLEGLE
    +
  6. +
+

Verifying EC

+

Get OCO’s monthly updated EC estimates at: Banks, Jones, OH4, OBI to +compare to
+Delta Modeling Section’s historical simulation and reported EC.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Observed Simulated
BANKSBANKS
JONESJONES
OBIROLD024
OH4ROLD034
+

Compare observed EC to simulated EC at other key locations

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Observed Simulated
ANH ANH (RSAN007)
EMM EMM (RSAC092)
JER RSAN018
MDM RMID015
VCU CHVCT000
OH1 ROLD074
OLD ROLD059
+

Attachments:

+

+worddavdba6ec7d80d8e6d0f248fa3e9c1a9f2c.png +(image/png)
+ +worddav1c660f167bf01e13f8c74d8d923445b8.png +(image/png)
+ +worddavc1eb73f1cc8b69c78afeeda15ca65f9f.png +(image/png)
+ +worddavffb24ad6008690b469796d092cb3e822.png +(image/png)
+ +worddav7e51b7bfbf042cd6018456316774c8ac.png +(image/png)
+ +worddav46b8e2022c959a0ec4979c9f7a8a9b76.png +(image/png)
+ +worddav096303e2931d512411acda589b39c3cb.png +(image/png)
+ +worddavac6c9b93520fd08679d0ccfdc60b7b66.png +(image/png)
+ +worddav417450fd7fccb9d246170ebb7f24d040.png +(image/png)
+ +worddava898e0b2ae4581b97e766bf78287b5c1.png +(image/png)
+ +worddav68b2cbedb7a858f3c9b7f1a1e3ab74ac.png +(image/png)
+ +worddav5b3d61166388b0d6b2f355755006cda2.png +(image/png)
+ +worddav8c7ed9ee05ac17e5e96e011f3f2618cd.png +(image/png)
+ +worddav1767755d061f0eab1f3ca3f72ac8cef2.png +(image/png)
+ +worddavf80c76565c2ad18b4d77a1e3e7d95206.png +(image/png)
+ +worddavcbc13ea92d122ff1199746fedcb5d095.png +(image/png)
+ +worddavbec1a14c56fb496c1411aa3f37418f8f.png +(image/png)
+ +worddav7b88059002b010dabef7c4b1535bb124.png +(image/png)
+ +worddav5bb8f683823e3bf73c38767313d66e71.png +(image/png)
+ +worddavc305fa1a938290814fd2248549ba4430.png +(image/png)
+ +worddavdfbad699bc5178a523225eb4db224138.png +(image/png)
+ +worddav4ea5b554fecf90d7309db37a565d3a27.png +(image/png)
+ +worddav97cef626ad442220331e25e5e8e8107f.png +(image/png)
+ +worddave19a27a3183a9cd911129f634e5785e1.png +(image/png)
+ +worddavcafee77d96b6996a2ba1b263d685acf2.png +(image/png)
+ +worddav586bb64e084e848f881c6b85898711d9.png +(image/png)
+ +worddav06a87c364207da504d16cfe87f616b71.png +(image/png)
+ +worddavfed7fe74c73acf86c9270728c405a08c.png +(image/png)
+ +worddav1b9bd5a7d0e6c5fe479a6a2f41178b3d.png +(image/png)
+ +worddav252bc8a83f17f3aaf5122fc24dc34a7b.png +(image/png)
+ +worddavaefc8f4547e26bc30a4e1e2e6d9649e4.png +(image/png)
+ +worddav43dc9470b95f7adadead62075f065c27.png +(image/png)
+ +worddav44b58de538e094cc6f56fa252e0b3775.png +(image/png)
+ +worddavf81ccfffae04537644dbc328afa65327.png +(image/png)
+ +worddavfb16076aa13e127fd8eb1d3b049dec6e.png +(image/png)
+ +worddav9a6b20dc0a73de6bf9dab0456647125f.png +(image/png)
+ +worddav230f2ffdb48f91fc3c31cb101e946de0.png +(image/png)
+ +worddave2ddaa02d082a0f4038f1aa1cae93a47.png +(image/png)
+ +worddav41ab2bbaa193e682b05098057d919df2.png +(image/png)
+ +worddavcb34498bae50e99b02ddf21594a946c0.png +(image/png)
+ +worddav248f48ce932b6488517e2109896fdb56.png +(image/png)
+ +worddave25679296b7bc483b7fb1156d722e8b6.png +(image/png)
+ +worddav44d59b8b5a711866ad8dde94f0d42d27.png +(image/png)
+ +worddav660ebda068b8ef166732e0aef200cc6e.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Organizing_a_Study/index.html b/reference/Organizing_a_Study/index.html new file mode 100644 index 00000000..2b4215eb --- /dev/null +++ b/reference/Organizing_a_Study/index.html @@ -0,0 +1,586 @@ + + + + + + + + + + + + + + + + + + Organizing a Study - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Organizing a Study

+

Overview

+

The DSM2 installation directory is as follows:

+
/dsm2
+  /bin            # where the executable files are
+  /common_input   # a repository of common input files
+  /studies
+     [empty]      # DO YOUR WORK HERE (if inside the distribution)
+  /study_templates    # ...and NOT here
+     /historical
+     /ocap_sdip
+        config_ocap_sdip.inp
+        hydro.inp
+        qual_ec.inp
+        /timeseries
+  /timeseries     # Archivable time series (historical
+                  # and series used as basis for preprocessing)
+  /tutorials
+
+

Explaining the directory structure

+

bin

+

"Bin" stands for "binary" and refers to the executables. When you +installed dsm2, your path variable got set to the new distribution.

+

common_input

+

The common input directory is a repository of files that are used in the +standard templates. These files start with the name of the (parent) +object, then the "layer" name then the version date. The templates refer +to this directory often. You do not have to maintain these links, but +please do not edit the files... the Layering system should help with +this.

+

study_templates

+

This directory houses samples for historical and planning runs. They +represent our latest setup as of the distribution. As the templates are +updated they may point to newer files in common_input.

+

studies

+

A study is an associated group of simulations, which might involve any +combination of DSM2 modules. Often the study compares several different +alternatives. Many DSM2 modelers prefer to house different alternatives +in different folders, but there are good reasons to house them in one +study folder and just use different configuration files. To get started +you will typically copy one of the study_template sub-directories to the +/study folder. Don't change the ones in study_templates!

+

timeseries

+

The timeseries directory contains the timeseries you will need in the +regular course of working with DSM2. Since the data that are most +reusable are historical and DICU, that is most of what you will find +here. We don't recommend putting study-specific files (e.g. CALSIM +output) in this directory.

+

tutorials

+

The tutorials directory is a workspace for using the tutorials. It is +a lot like /studies in the sense that you will copy templates here.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Planning_Simulation_Checklist/index.html b/reference/Planning_Simulation_Checklist/index.html new file mode 100644 index 00000000..e96abe41 --- /dev/null +++ b/reference/Planning_Simulation_Checklist/index.html @@ -0,0 +1,513 @@ + + + + + + + + + + + + + + + + + + Planning Simulation Checklist - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Planning Simulation Checklist

+

Model Preparation

+

Get Input Data from Calsim

+
    +
  • Rename Calsim output as dv.dss and put in timeseries\CALSIM\
  • +
  • Preprocess Calsim output to DSM2 inputs
  • +
+

Check Boundary Conditions

+
    +
  • Sacramento River inflow
  • +
  • San Joaquin River Inflow
  • +
  • Sacramento River + Yolo Bypass Inflow
  • +
  • Banks pumping
  • +
  • Jones pumping
  • +
+

Martinez Stage

+
    +
  • Planning stage
  • +
  • Sea Level Rise stage ()
  • +
+

Martinez EC

+
    +
  • Martinez EC generator
  • +
+

Consumptive Usage

+
    +
  • DCD planning (which is also input for Calsim3)
  • +
+

Gate Operation

+
    +
  • DCC
  • +
  • Montezuma Gate
  • +
  • Clifton Court Forebay Gate
  • +
+

Model Run

+

Binary Versions

+
    +
  • suggest using relative path and .bat
  • +
+

Running Time Window

+
    +
  • 16-year: 1975/10 - 1991/9
  • +
  • 94-year: 1975/10 - 1991/9
  • +
  • start a few months earlier for warming-up
  • +
+

Postprocess, Visualization, Usage

+

Notebook

+

Water Quality Standard

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Post_processing_and_Visualization/index.html b/reference/Post_processing_and_Visualization/index.html new file mode 100644 index 00000000..5f205da6 --- /dev/null +++ b/reference/Post_processing_and_Visualization/index.html @@ -0,0 +1,472 @@ + + + + + + + + + + + + + + + + + + Post processing and Visualization - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Post processing and Visualization

+

DSM2 writes out model information in HEC-DSS format and HDF5 format. +Output requested from the model is written to HEC-DSS while model input +and state is recorded in HDF5 files.

+

Vista is the standard tool for accessing both these kinds of information +for DSS. Vscript is the associated scripting tool which leverages the +Python language with the HEC-DSS and HDF5 java libraries.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Presentations/index.html b/reference/Presentations/index.html new file mode 100644 index 00000000..565a74ad --- /dev/null +++ b/reference/Presentations/index.html @@ -0,0 +1,525 @@ + + + + + + + + + + + + + + + + + + Presentations - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + + + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/RKI_Referenced_Output/index.html b/reference/RKI_Referenced_Output/index.html new file mode 100644 index 00000000..f97bfe66 --- /dev/null +++ b/reference/RKI_Referenced_Output/index.html @@ -0,0 +1,1154 @@ + + + + + + + + + + + + + + + + + + RKI Referenced Output - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

RKI Referenced Output

+

River Kilometer Index is a way to index locations along a river by +measuring the distance in kilometers from the downstream end of the +river, e.g. RSAC054 is 54 km from the Golden Gate Bridge, the most +downstream discernible reach of the Sacramento River

+

In light of current GIS information this practice should be superseded +by exact latitude/longitude coordinates; however, it remains in +use for legacy reasons. It is also useful in terms of physical +processes which are related to the distance along the river rather than +coordinates in a general space

+

The table below comes from DSM2 v6 (last referenced by J. Anderson) 

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NAMECHANDISTCOMMON_NAME
BYOLO0403990# Yolo Bypass          
CFTRN000172727# Turner Cut          
CHCCC0062470# Contra Costa Pumping Plant / Rock Slough     
CHDMC0042162000# DMC           
CHDMC0062160# DMC @ Tracy Pumping Plant       
CHGRL0052111585# Grant Line Canal (West Position)       
CHGRL00920736# Grant Line Canal (East Position)       
CHGRL0122041672# Grant Line Canal @ Head       
CHSAC03039223614# Sacto. Ship Channel         
CHSAC03139220661# Sacto. Ship Channel         
CHSWP00382length# Clifton Court Forebay (gates)        
CHVCT0002291328# Victoria Canal          
CHWST0002323084# Clifton Court Forebay Entrance        
CRGRV0024550# Green Valley Creek         
CRSUS0044570# Suisun Creek @ Cordelia Rd.       
LSHL001281113# Sac. @ Lake Sherman        
LSHL0032995145# SJR @ Mayberry Cut        
RCAL009210# Calaveras River at Stockton        
RCSM0755492501# Cosumnes River          
RFAL0082765648# FALSE River @ Webb Tract       
RMID005156140# Middle River          
RMID007248665# Middle River          
RMID015_144144838# Middle River          
RMID015_1451452114# Middle River (same as #144)       
RMID023135719# Middle River @ Borden Hwy       
RMID0271333641# Middle River @ Tracy Blvd       
RMID0401263951# Middle River @ Mowery Bridge       
RMID0411251700# Middle River @ Old River       
RMKL0053745030# North Fork Moke. River (Georgiana Sl.)      
RMKL019357694# North Fork Moke. River        
RMKL027334350# Moke. River @ Thornton        
RMKL0325502617# Moke. River near Thornton        
RMKL0705500# Moke. River @ Woodbridge        
ROLD0141170# Old River @ Holland Cut       
ROLD0241062718# Old River @ Bacon Island (near CCC)     
ROLD034903021# Old River near Byron        
ROLD040822609# Old River @ Clifton Court Ferry      
ROLD046801431# Old River          
ROLD047792766# Old River          
ROLD059713116# Old River @ Tracy Road       
ROLD07454735# Old River @ Head        
RSAC054441length# Martinez (MRZ)          
RSAC0564413119# Martinez at Benicia Bridge        
RSAC064452190# Port Chicago          
RSAC07543711108# Mallard Island (MAL)         
RSAC0774371870# Pittsburg (PTB)          
RSAC0814365733# Collinsville           
RSAC0844359662# Sac River near Sherman Lake       
RSAC092434435# Emmaton           
RSAC1014309684# Rio Vista (RIV)         
RSAC1234231358# Sac near Georgiana Slough        
RSAC1284218585# Sac above DCC         
RSAC1394184814# Sac @ Green's Landing         
RSAC1404180# Sac @ Snodgrass Sl.        
RSAC1424175496# Sac @ Hood         
RSAC15541411921# Sac @ Freeport         
RSAN002534276# Mouth SJR          
RSAN0032851700# SJR @ Sherman Lake        
RSAN00752366# SJR @ Antioch         
RSAN008520# Lone Tree Way @ Hwy. 4 near Antioch     
RSAN014499570# Blind Point          
RSAN018834213# Jersey Point          
RSAN024478246# SJR @ Bradford Isl.        
RSAN0323499672# San Andreas Landing, NOTE: RSAN032 water comes from Moke. R, hence the model location is on Moke. MM, 2000.09.06        
RSAN03742286# SJR @          
RSAN040383526# SJR           
RSAN0433198571# SJR @ Venice Isl.        
RSAN046315628# SJR between Turner & Columbia Cut      
RSAN052242643# SJR @ Rindge Pump        
RSAN058202520# SJR @ Stockton Ship Channel       
RSAN063143281# SJR @ Stockton         
RSAN072109400# SJR @ Brandt Bridge        
RSAN08763930# SJR @ Mossdale         
RSAN112174744# SJR @ Vernalis         
RSMKL0083447088# South Fork Moke @ Staten Island      
RSMKL024337971# South Fork Moke @ New Hope Bridge     
SLBAR0024060# Barker Slough / North Bay Aqueduct      
SLCBN0014774000# Chadbourne Sl. (Hollywood Club)        
SLCBN0024770# Chadbourne Sl. (Sunrise Club)        
SLCCH0164020# Cache Slough          
SLCRD0004717216# Cordelia Sl. (Miramonte)         
SLCRD0034743754# Cordelia Sl. (Cygnus)         
SLCRD0064694776# Cordelia Sl. (Ibis)         
SLCRD00946811200# Cordelia Sl. (Garibaldi)         
SLDUT0072747351# Dutch Sl.          
SLDUT0092734026# Dutch Sl.          
SLFHN0024792640# Frank Horan Sl.         
SLGYR0035010# Goodyear Sl. (Morrow Island)        
SLGYR0084731955# Goodyear Sl. (Ghost Fleet)        
SLHIL0024864615# Hill Sl.          
SLIND0052390# East CC Pumping Plant (Discovery Bay)      
SLMAY0022831611# Mayberry Sl.          
SLMID0012895441# Middle Sl. @ Winters Island       
SLML0014434599# Mallard Sl. (CCWD)         
SLMZU0035230# Montezuma Sl. @ Hunter Cut       
SLMZU0115177662# Montezuma Sl. @ Beldon's Landing       
SLMZU02551313883# Montezuma Sl. @ National Steel       
SLMZU0295131346# Montezuma Sl. @ Roaring River       
SLMZU0325111677# Montezuma Sl.          
SLNY0022882005# New York Sl.         
SLPPR0002684735# Piper Sl. @ Bethel Tract       
SLPPR0032698333# Piper Sl. @ Bethel Isl.       
SLRAR0005291250# Roaring River          
SLRAR0095327375# Roaring River @ Sprig        
SLRCK005247221# Rock Slough (CCC)         
SLSBT0113852273# Steamboat Sl.          
SLSUS0124619982# Suisun Sl. @ Volanti        
SLTMP0001941946# Tom Paine Sl. Intake        
SLTMP0171850# Tom Paine Sl.         
SLTRM004310540# Three Mile Sl. @ SJR       
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Scripts_and_input_files/index.html b/reference/Scripts_and_input_files/index.html new file mode 100644 index 00000000..5d3a0d47 --- /dev/null +++ b/reference/Scripts_and_input_files/index.html @@ -0,0 +1,609 @@ + + + + + + + + + + + + + + + + + + Scripts and input files - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Scripts and input files

+

Folder Structure

+

Most preprocess scripts are originally located at ${DSM2}/scripts/, +while it also needs DSM2 config file and input timeseries (from CalSIM) +to run.

+

Fig. Sample batch preprocess from CalSIM II to DSM2

+

Some key functions are explained below:

+
    +
  • config file usually defines the time window, CALSIMNAME, + DSM2MODIFIER, which need to be consistent with the scripts and + timeseries files.
  • +
  • planning_boundary_flow contains method 'smooth_flow' to 'tension + spline' boundaries Sac and Vernalis from monthly to daily-interval + data.
  • +
  • DICU are directly transferred as monthly-interval data.
  • +
  • prep_ec generate Martinez EC from its astro-planning stage and NDO + (modified from G-model and has a newly calibrated version). Refer to + Martinez EC + Generator for + its introduction and updates.
  • +
+

To run

+

type prepro*.bat config*.inp at the scenario path in command window

+

*usually we start preprocess earlier (like 1 month) than hydro + qual

+

CWF preprocess version in DSM2 v806

+

CH2M helped creating an updated version for project 'California Water +Fix' (CWF), whose existing condition (EST)  and No Action Alternative +(NAA) are widely used as templates for DSM2 planning study.

+ + + + + + + + +
+

+
+

+
+

+
+ +

Figure. Sample directory of CWF scenario for DSM2 planning modeling

+

Compared to the original scripts package, CWF version has

+
    +
  • a sub-folder 'scripts' under scenario. It contains updated scripts + to replace those under ${DSM2}/scripts/.
  • +
  • a sub-folder 'input' under scenario. It contains updated hydro and + qual grids.
  • +
  • a sub-folder 'timeseries' under scenario. It contains updated DSM2 + input files (*daily.dss for boundaries, *.dss for Martinez stage, + DICU, oprule, QUAL, *qa.dss for QAQC)
  • +
  • the above input files are generated from its 'CALSIM' subfolder, + with *DV.dss (CalSIM outputs) and *SV.dss (CalSIM inputs) required + by the current CalSIM II preprocess (CalSIM 3 plans to keep only + *DV.dss)
  • +
+

+

Figure. CWF version's batch preprocess from CalSIM II to DSM2

+

More details in the scripts:

+
    +
  • planning_boundary_flow contains method 'smooth_flow' to 'tension + spline' boundaries Sac and Vernalis from monthly to daily-interval + data.
  • +
  • CWF dailymapping processes all boundaries and source flows to + daily-interval data.
  • +
  • prep_ec has updated changes with sea level rise adjustment
  • +
  • Vernalis Adaptive Management Plan (VAMP) of the original scripts is + not active anymore
  • +
  • Source flows and intakes are added/modified
  • +
  • Method 'daily mapping' to process all boundaries/source inputs to + daily-interval data
  • +
+

Attachments:

+

+bat_prep_orig.png (image/png)
+ +bat_prep_cwf.png (image/png)
+ +CWFtimeseries.JPG (image/jpeg)
+ +CWFcalsimfiles.JPG (image/jpeg)
+ +CWFdir.JPG (image/jpeg)
+ +bat_prep.png (image/png)
+ +prep_doc_bst.py +(application/octet-stream)
+ +planning_ec_mtz_bst.py +(application/octet-stream)
+ +planning_boundary_flow_bst.py +(application/octet-stream)
+ +extend_calsim_outputs_bst.py +(application/octet-stream)
+ +expand_seasonal_bst.py +(application/octet-stream)
+ +dailymapping_051010.py +(application/octet-stream)
+ +prep_ec_bst.py +(application/octet-stream)
+ +prepro_BST_Existing.bat +(application/octet-stream)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Supporting_Tools_and_QAQC/index.html b/reference/Supporting_Tools_and_QAQC/index.html new file mode 100644 index 00000000..6664fcd3 --- /dev/null +++ b/reference/Supporting_Tools_and_QAQC/index.html @@ -0,0 +1,573 @@ + + + + + + + + + + + + + + + + + + Supporting Tools and QAQC - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Supporting Tools and QAQC

+

Currently, all the preprocess scripts are written with +DSM2-Vista. It's recommended +to use the most updated version at +https://github.com/CADWRDeltaModeling/dsm2-vista (not +the one included in DSM2 package). * Note to change the relevant +environment variables.

+

1.

+

It is good practice to review and compare CalSIM outputs and its +preprocessed results (*.dss) before running DSM2. +HEC-DSS and +DSM2-Vista are the most widely +used tools.

+

http://www.hec.usace.army.mil/software/hec-dss/

+

2.

+

WRIMS' report tool is useful to compare CalSIM outputs, i.e. DSM2 +inputs (timeseries\CALSIM\DV.dss)

+

https://www.water.ca.gov/Library/Modeling-and-Analysis/Modeling-Platforms/Water-Resource-Integrated-Modeling-System

+

* Note to keep consistency in the pathnames, time windows, etc. between +the comparison scenarios.

+

+

+

+

3.

+

Another good tool to compare between scenarios dss (in general) is the +scripts Compare DSS +Tool in +DSM2-Vista.

+

${vista}\bin\compare_dss.bat

+

+

or a simplified version to check +consistency ${vista}\bin\compare_dss_files.bat

+

* One good practice is to locate changes first (maybe the big ones), +then use compare_dss.bat to specify and illustrate them.

+

+

4.

+

A quick way to check accuracy of preprocess is to compare Net Delta Flow +(NDO) = inflows-outflow-CU

+

Attachments:

+

+compareDSSfiles.JPG (image/jpeg)
+ +compareDSS.JPG (image/jpeg)
+ +wrimsReport2.JPG (image/jpeg)
+ +wrimsReport1.JPG (image/jpeg)
+ +wrimsReport.png (image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Update_DSM2_historical_simulation/index.html b/reference/Update_DSM2_historical_simulation/index.html new file mode 100644 index 00000000..230a99b4 --- /dev/null +++ b/reference/Update_DSM2_historical_simulation/index.html @@ -0,0 +1,1202 @@ + + + + + + + + + + + + + + + + + + Update DSM2 historical simulation - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + +

Update DSM2 historical simulation

+

Introduction

+

The historical simulation of Delta Simulation Model II (DSM2) simulates +the ground truth of Sacramento-San Joaquin Delta(Delta) hydrodynamics +and water quality. It requires collecting the observed flows, stages, +and water quality at the model boundaries and the actual gate operations +as much as possible, converting them into the data format DSM2 +recognizes, and conducting quality assurance and quality control +(QA/QC). The missing inputs must be filled to make DSM2 run +successfully. This document directs all the procedures of preparing the +inputs for DSM2 historical simulation. All the required scripts and +information for updating DSM2 historical simulation have been included +in the package. The package has been saved in the shared folder +\nasbdo\Delta_Mod\Share\lanliang\Update_DSM2_package. Some links might +not be effective on this page, but work well in the document Update +DSM2 Historical Simulation.docx in the shared folder.

+

Prerequisites

+

1.     Follow the instructions here to +add the option 'Open command window here' to the Windows Explorer context menu. You will need administrative privileges to do this, and you +should only do this if you are comfortable modifying the registry in Windows 10. This will allow you to open +a command window by right clicking on a folder in Windows explorer. DSM2 models and Python +scripts can be run in the command window.

+

2.     HEC-DSSVue:  Download and install HEC-DSSVue. It is essential for visualizing and +examining data and converting data into the required formats.

+

3.     Vtools: Download and install Vtools. It is a set of tools for processing DSS data. Its +functions are called by the python scripts to update DSM2 historical +simulation.

+

4.     Python: Download and install Python 2.7, the version +compatible with Vtools, from the websites of +Python or +Anaconda.

+

The schematic of updating procedures

+

The schematic indicates the four major steps for updating the inputs for +a DSM2 simulation of historical Delta hydrodynamics and electrical +conductivity (EC). Each step consists of collecting part of data and +conducting QA/QC. The technical specification of the procedure has been +described below. Once the four steps for updating the DSM2 historical +simulation are conducted, the latest version of the DSS files +(DICU_YYYYMM.dss, histYYYYMM.dss, and gates-v8-YYYYMM.dss) are +generated. Copy these three DSS files in the DSM2 subfolder \timeseries +and the DSM2 historical simulation update is completed. 

+

+

Step 1: DICU Update (under the folder /DICU)

+

1.      Retrieve CIMIS data

+

-          Log in +CIMIS with the user name wildej and password +delta, and click DATA and then My Reports.

+

-          Under the list Quick Reports, click Execute +monthly Report, List 1, and then the report is loaded in an +Excel spreadsheet. This report includes the precipitation and reference +ET and other climatic data at stations Davis(6), Brentwood(47), +Manteca(70), Twitchell(140), Lodi West(166), Tracy(167). Right now, the +data at Lodi West are missing on that spreadsheet.

+

-         Log off CIMIS and close the browser.

+

2.      Prepare precipitation and evapotranspiration data in +DICU-YYYYMM.xlsm

+

-          Delete the second and third columns in the downloaded +spreadsheet, and copy the data into DICU-YYYYMM.xlsm on CIMISData sheet with the +same format.

+

-          Check data on PrecipLookupToDSS sheet are correctly +linked to the data on CIMISData sheet. Sometimes, the data is not +available in some months or some stations on CIMISData. When the +downloaded spreadsheet is pasted on the CIMISData sheet, the +precipitation and reference ET on CIMISData will be automatically copied +to another sheet PrecipLookupToDSS. In order to make the data +automatically transfer from CIMISData to PrecipLookupToDSS, the rows of +the missing data in CIMISData have to be filled with blanks.

+

-          Extend the data on TimeSeries sheet, and keep the same +formulas on each column. The long-term mean values in the columns, such +as “Mean Evap-ET”, “DICU Ave Evap(mm)” and “Hist Ave Evap”, could repeat +the same values as those in the previous year.

+

-          Save the spreadsheet. Do not close the file. It will be +used for updating the precipitation and reference ET for the DICU model.

+

3.      Update the input files in DICU model

+

-          Go to DICU/Precip/7STATION-DAT-Y2K , open the Precip input +file 7STAPREC.WY20XX. The file saves the monthly precipitation in the +water year 20XX at seven stations: Davis, Rio Vista, Brentwood, Tracy, +Stockton, Lodi and Galt. CIMIS has not collected the precipitation data +at Galt for recent several decades. Update the precipitation with the +same station names from the spreadsheet PrecipLookupToDSS, and copy the +same precipitation at Lodi into the column of Galt.

+

-          Starting from WY2015, the Lodi West data from CIMIS is +missing, so the Lodi precipitation after April 2015 is downloaded from  +the National Centers for Environmental Information, +University of California Agriculture & Natural Resources (UCANR). Paste +the downloaded Lodi precipitation on the spreadsheet PrecipLookupToDSS.

+

-          Update the Precip file located at +DICU/PRECIP/7STATION-DAT-Y2K . If some of the downloaded Lodi data are +missing, the precipitation at Stockton can be taken as the substitute, +since Stockton is the nearest available station to Lodi.

+

-          Save the Precip file. (If starting a new water year, make +sure to copy the file for the next water year with the appropriate name +of the next water year + 1 so the average precip information will not be +lost!)

+

-          Water year type. Go to +the website to get the water year type, and go to the +folder DICU/DICU5IN1/1922-20**(the current year to update), and update +WYTYPES file with textpad.

+

-          Pan evaporation. Go to +DICU/PAN_EVAP, update README-2YYY.txt file with the +Manteca pan evap (in and mm), AVE EVAP of DICU_YYYYMM.xlsm and ET ADJ +FACTR from the “TimeSeries” worksheet of DICU_YYYYMM.xlsm to keep a +record of the data used.

+

-          Update DICU5.5 txt file. Go to +DICU/DICU5/1922-20**, update DICU5.5 txt file at the bottom. +You will want to update the water year type and the ET adjustment +factors which are in rows where the values are from column in the +“TimeSeries” worksheet of DICU_YYYYMM.xlsm. Also remember this is by +water year and the data spacing must not change. 

+

4.      Run DICU

+

-          Go to DICU/DICU5IN1/1922-20**, update dicu5in1.py. The +lines marked with “Update here!” must be updated. Open the command +window, and run:

+

python dicu5in1.py

+

-          Go to DICU/DICU5/1922-20**, update and run the python +script run-dicu5.py. The lines marked with “update folder name” must be +updated. Open the command window, and run:

+

python run-dicu5.py

+

-          Go to DICU/NODCU/NODCU12/1922-20**/, update and run the +python script bat1922-20**.py. The lines marked with “update folder +name”, “update the year”, and “update the month” must be updated. Open +the command window, and run:

+

python bat1922-20**.py

+

-           Copy the file DICU_YYYYMM.dss from +/DICU/NODCU/NODCU12/1922-20**/ to the folder /timeseries of the DSM2 +historical simulation.

+

Step 2: DSM2 Boundary Inputs Update (under the folder /DSM2_flow_stage_EC_input)

+

1.      Boundary inputs from CDEC

+

-          Most DSM2 boundary data can be retrieved from CDEC. The +python script Retrieve_data_fromCDEC.py can retrieve the data of CDEC +stations as an input text file defines, and write the retrieved data +into a DSS file.

+

The input text file is a queue of the information of the requested CDEC +stations, and each line in the file contains the station ID, sensor +number, and duration of one CDEC station.

+

The data in the DSS file is the raw data with data gaps frequently. To +conduct writing the timeseries in the DSS file without interruptions, +they are defined as irregular timeseries.

+

Get a command prompt window under the folder /DSM2_flow_stage_EC_input, +and type the line below to retrieve the DSM2 boundary data.

+

Python Retrieve_data_fromCDEC.py arg1 arg2 arg3 arg4

+

where

+

Arg1 – The first argument, the text file of the CDEC station +information. To retrieve DSM2 boundaries, Arg1 is +Delta_boundaries.txt. The DSM2 boundary flows, stages, and ECs, +from CDEC are included in this text file. Table 1 lists the DSM2 +boundaries, and their corresponding CDEC stations and related +information, which the text file has included. CDEC does not have the +DSM2 boundary, Mokelumne River inflow.

+

Arg2 – The second argument, the starting date of the data, formatted as +mm/dd/yyyy.

+

Arg3 – The third argument, the ending date of the data, formatted as +mm/dd/yyyy.

+

Arg4 – The fourth argument, the name of the DSS file that stores the +retrieved data.

+

Below is an example to download the DSM2 inputs from 1/1/2017 through +12/31/2017 and write the retrieved data into a DSS file named as +update201712.dss.

+

Python Retrieve_data_fromCDEC.py Delta_boundaries.txt 1/1/2017 +12/31/2017 update201712.dss

+

Table 1 The CDEC stations to retrieve +data as the DSM2 inputs

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
CDEC stationRKI nameDSM2 input typeInput locationTime interval
LISBYOLO040flowYolo Bypass1 DAY
YBYBYOLO040flowYolo Bypass1 DAY
VNSRSAN112flowSan Joaquin River at Vernalis1 DAY
FPTRSAC155flowSacramento River at Freeport1 DAY
NHGRCAL009flowCalaveras River at Stockton1 DAY
MHBRCSM075flowCosumnes River at Michigan Bar1 DAY
HROCHSWP003exportBanks pumping1 DAY
TRPCHDMC004exportJones (Tracy) pumping1 DAY
BKSSLBAR002exportNorth Bay Aqueduct1 DAY
CCWCHVCT001exportMiddle River pumping1 DAY
IDBROLD034exportOld River pumping near Discovery Bay1 DAY
INBCHCCC006exportRock Slough pumping near Brentwood1 DAY
MRZRSAC054stageMartinez15 MIN
MRZRSAC054ECMartinez1 HOUR
SRHRSAC139ECSacramento River at Hood1 DAY
VERRSAN112ECVernalis1 DAY
+

-          QA/QC of all the data in the DSS file except Martinez +stage and EC. Conduct QA/QC and remove errors for those timeseries. +Martinez stage and EC will be processed independently after Step 2.

+

-          Use HEC-DSSVue to fill the data gaps. HEC-DSSVue can +automatically fill the gaps with several time steps missing by +interpolation when converting irregular to regular timeseries. There is +another way to fill the gaps with longer intervals. Go to the menu of +HEC-DSSVue, and click: Tools->Math Functions->General->Estimate +missing values.

+

-          Use HEC-DSSVue to convert the irregular timeseries into +regular timeseries. Go to the menu of HEC-DSSVue, and click: +Tools->Math Functions->Time Functions->select operator: +min/max/avg/…over period -> select function type: average over period +-> select new period intervals as Table 1 -> Compute->save with the +default pathnames.

+

-          Copy and paste the file update201712.dss with regular +timeseries into the folder /merge_data.

+

2.      Retrieve ancillary data from CDEC

+

-          Call the same Python script Retrieve_data_fromCDEC.py to +retrieve extra data for QA/QC, filling data gaps at Martinez, and +checking the gates operation times. The CDEC stations for fulfilling +those functions are listed in the text file, ancillary_stations.txt. +Below is an example to download the ancillary data from 1/1/2017 through +12/31/2017 and write the retrieved data into a DSS file named as +ancillary201712.dss.

+

Python Retrieve_data_fromCDEC.py ancillary_stations.txt +1/1/2017 12/31/2017 ancillary201712.dss

+

3.    Download data from other sources

+

-          San Francisco stage for updating Martinez stage

+

Go to the website: +http://tidesandcurrents.noaa.gov/waterlevels.html?id=9414290

+

Choose the options:

+

Units: Feet

+

Timezone: LST

+

Datum: MLLW

+

Interval: Hourly

+

Update: Data Only

+

When the data list shows on the screen, click the button: Export to CSV, +to save the data in a csv file. Load the csv data in HEC-DSSVue, and +convert its datum from MLLW to NGVD by NGVD = MLLW - 2.64 feet

+

-          Yolo Bypass Flow

+

Yolo Bypass is a wide-open area, so it is hard to investigate the actual +flows in this region. There is no flow station at the DSM2 boundary +location. As a boundary flow in DSM2, Yolo Bypass flow has been assumed +to equal the aggregation of the flows collected from those stations near +Yolo Bypass, like the Yolo Bypass flow (QYOLO) from DAYFLOW as

+

QYOLO = Yolo Bypass flow at Woodland + +Sacramento Weir Spill + South Fork Putah Creek flow

+

These three flows can be retrieved from CDEC station YBY, USGS station +11426000(SACRAMENTO WEIR SPILL TO YOLO BYPASS NR SAC CA), and CDEC +station PUT.  The last two stations have been inactive for recent years, +so YBY is the only active station collecting the Yolo Bypass flow.

+

However, DSM2 v6.0 historical simulation update tool took the flow at +CDEC station RUM (Cache Creek at Rumsey Bridge) as the Yolo Bypass flow. +It is not appropriate, especially the inflow to the Sacramento River +during summers. Generally, during summers and falls Yolo Bypass has toe +drain instead of inflow to the Sacramento River, and during winters and +springs it functions as a diversion to reduce the Sacramento River +floods.

+

DSM2 input data version is based on the timeseries ending time. Starting +from the version December 2017 (12/2017), the RUM flow from 2006 through +current has been replaced by the available observed flows at YBY and +LIS. The old input data versions keep the RUM flow.

+

Furthermore, from the version 12/2017, CDEC station LIS flow is taken as +the Yolo Bypass flow from June to November every year, while CDEC +station YBY flow is taken as the Yolo Bypass flow from December to next +May. If the Sacramento River floods diverted into Yolo Bypass come +earlier than December or after May, YBY flow might be accounted as the +Yolo Bypass flow of the DSM2 inputs in those months.

+

The combination of YBY and LIS flows in the file update201712.dss +under the folder /DSM2_flow_stage_EC_input is conducted in HEC-DSSVue. +Open update201712.dss in HEC-DSSVue, set the time window December +1st -May 31st, select the daily YBY flow, +duplicate the daily YBY flow and rename it as +/CDEC/LIS/FLOW//1DAY/20_E/. It overwrites the daily LIS flow from +December through May. After that, the daily LIS flow represents Yolo +Bypass flow and will be merged with the previous version of DSM2 +inputs. 

+

-          Mokelumne River flow

+

Contact the staff in East Bay Municipal Utility District (EBMUD) +directly. Their website only presents the flow at Mokelumne River below +WID for the last seven days:

+

http://www.ebmud.com/water-and-drought/about-your-water/water-supply/water-supply-reports/daily-water-supply-report/

+

Kevin Fung kevin.fung@ebmud.com has been contacted for the past +several years and provided the raw data of the current year.

+

The QA/QC’d Mokelumne River flow of the previous years, which EBMUD sent +to USGS, can be downloaded from USGS website.

+

Once Mokelumne River flow is received, use HEC-DSSVue to load and save +it in the DSS file /DSM2_flow_stage_EC_input/updateYYYYMM.dss. Here MM +and YY/YYYY are the month and year of the updated version, and set the +pathname of this time series as

+

/FILL+CHAN/RMIL070/FLOW//1DAY/DWR-DMS-YYYYMM/

+

Step 3: Martinez boundaries Update

+

1.      Martinez Stage (DATUM: NAVD88; under the folder +/MTZ_stage_EC/fill_stage)

+

-          Remove stage data errors. Copy Martinez CDEC stage +from /DSM2_flow_stage_EC_inputs/update201712.dss to a new DSS file, for +example MTZ_201712.dss, under the folder /MTZ_stage_EC/fill_stage. Check +the data in the file MTZ_201712.dss, remove errors in the stage +timeseries, record the time window of each data gap at the end of the +text file input.txt, and delete the out-of-date time windows in the file +input.txt.

+

-          One hour shift of Daylight saving time. Compare the +astronomical tide and CDEC retrieved stage at Martinez, select the time +window of one hour shift due to Daylight saving time, and shift one hour +to match the phases of astronomical tides. The time shift can be +conducted in HEC-DSSVue under the menu Tools -> Math Functions ->Time +Functions -> Operator: Shift in Time. Record the data gaps in March and +November at the end of input.txt because of the time shift.

+

-          Prepare the input file. Prepare the input text file +(input.txt) for the python script (fillgaps.py) to fill the stage gaps. +Below is an example of the file input.txt. It includes the data +version(tmark), names of input and output DSS files, the pathnames of +timeseries used in this gap filling, and the time windows of the stage +data gaps found. The time windows are suggested to be longer than one +day or two days to fill the gaps smoothly.

+

                                          

+

-          Run the python script:

+

Python fillgaps.py input.txt

+

2.      Martinez EC (under the folder /MTZ_stage_EC/fill_EC)

+

-          Remove the errors in the observed 15-minute EC data. Copy +Martinez 15-minute EC data from +/DSM2_flow_stage_EC_inputs/update201712.dss to a new DSS file, for +example MTZ_201712.dss, under the folder /MTZ_stage_EC/fill_EC. Open +MTZ_201712.dss, find and remove the errors in the Martinez 15-minute EC +data, and record the data gaps in the text files +.\timewindows_ec_ave.txt.

+

-          Average the filtered 15min MRZ EC to hourly MRZ EC and +save part F of the pathname as /100_E_AVE/. Then the hourly MRZ EC has +the pathname as

+

/CDEC/MRZ/EC//1HOUR/100_E_AVE/

+

-          Copy Mallard hourly and daily EC data from +/DSM2_flow_stage_EC_inputs/ancillary201712.dss to the same DSS file +/MTZ_stage_EC/fill_EC/MTZ_201712.dss.

+

-          Calculate NDOI by combining 6 inflows, 6 exports and +Delta consumptive use.

+

1)      In the file /DSM2_flow_stage_EC_input/updateYYYYMM.dss, use +HEC-DSSVue to sum up the six inflows into one total inflow with the +pathname

+

Path1: /CDEC/TOTAL/FLOW//1DAY/20_H/.

+

The pathnames of the inflows are listed below,

+

Sacramento inflow:   /CDEC/FPT/FLOW//1DAY/20_H/

+

San Joaquin inflow:   /CDEC/VNS/FLOW//1DAY/20_E/

+

Yolo Bypass inflow:   /CDEC/LIS/FLOW//1DAY/20_E/

+

Calaveras inflow:       /CDEC/NHG/FLOW//1DAY/23_H/

+

Cosumnes inflow:      /CDEC/MHB/FLOW//1DAY/20_H/

+

Mokelumne inflow:   /FILL+CHAN/RMIL070/FLOW//1DAY/DWR-DMS-YYYYMM/

+

2)      Also sum up the six exports into one total export with the +pathname

+

Path2: /CDEC/TOTAL/FLOW_EXPORT//1DAY/70_D/.

+

The pathnames of the six exports are listed below,

+

SWP:                                        /CDEC/HRO/FLOW_EXPORT//1DAY/70_D/

+

CVP:                                         +/CDEC/TRP/FLOW_EXPORT//1DAY/70_D/

+

North Bay aqueduct:               /CDEC/BKS/EXPORT//1DAY/70_D/

+

Old River near Brentwood:     /CDEC/INB/FLOW_EXPORT//1DAY/70_D/

+

Old River near discovery bay: /CDEC/IDB/FLOW_EXPORT//1DAY/70_D/

+

Middle River:                          /CDEC/CCW/FLOW_EXPORT//1DAY/70_D/

+

3)      Calculate the total inflow Path1 minus the total export Path2, +and save it as the timeseries with the pathname

+

Path3: /CDEC/TOTAL/FLOW//1DAY/INFLOWS-EXPORTS/

+

4)      Copy the timeseries with Path3 to the DSS file +/MTZ_stage_EC/fill_EC/MTZ_201712.dss

+

5)      Copy the latest version of dicu_YYYYMM.dss from the folder +/DICU/NODCU/NODCU12/1922-2017 to the folder /MTZ_stage_EC/fill_EC.

+

6)      Use HEC-DSSVue to open file /MTZ_stage_EC/fill_EC +/dicu_YYYYMM.dss, and sum up all the timeseries with part C DIV-FLOW to +one timeseries with

+

Path 4: /DICU-HIST+NODE/TOTAL/DIV-FLOW//1MON/DWR-BDO/

+

7)      Sum up all the timeseries with part C DRAIN-FLOW to one +timeseries with

+

Path 5: /DICU-HIST+NODE/TOTAL/DRAIN-FLOW//1MON/DWR-BDO/

+

8)      Sum up all the timeseries with part C SEEP-FLOW to one +timeseries with

+

Path 6: /DICU-HIST+NODE/TOTAL/SEEP-FLOW//1MON/DWR-BDO/

+

9)      Calculate Delta consumptive use by Path 4+Path6-Path5, and save +it as one timeseries with the pathname

+

Path 7: /DICU-HIST+NODE/TOTAL/FLOW//1MON/DWR-BDO/

+

10)  Convert the timeseries with path 7 into daily data,

+

Path 8: /DICU-HIST+NODE/TOTAL/FLOW//1DAY/DWR-BDO/

+

and copy the daily one to the DSS file +MTZ_stage_EC/fill_EC/MTZ_201712.dss

+

11)  Open file MTZ_201712.dss, and calculate NDOI by subtracting the +timeseries with Path8 from that with Path 3. The NDOI data has the +pathname

+

Path 9: /FILL+CHAN/NDOI/FLOW//1DAY/DWR-DMS-YYYYMM/

+

Here YYYY and MM are the year and month of the updated version.

+

-          Prepare the input file. Prepare the input text file, such +as timewindows_ec_ave.txt, for the python script (fillgaps_ec.py) to +fill the EC gaps. Below is an example of the file +timewindows_ec_ave.txt. It includes the data version(tmark), names of +input and output DSS files, and the time windows of the data gaps +found.   

+

               

+

-          Run the Python script, and obtain the output file +filled.dss.

+

Python fillgaps_ec.py timewindows_ec_ave.txt

+

3.      Merge the updated data and the previous version of DSM2 +input data

+

-          After the data gaps have been filled, copy QA/QC’d +Martinez 15-minute stage and 1-hour EC from the folders +/MTZ_stage_EC/fill_EC and /MTZ_stage_EC/fill_stage into the file +\DSM2_flow_stage_EC_inputs\updateYYYYMM.dss.

+

-          When all the input timeseries have been QA/QC’d, the +previous version of DSM2 input data must be extended with the updated +data in the file \DSM2_flow_stage_EC_inputs\updateYYYYMM.dss. Copy the +previous version of DSM2 input data, histMMYY.dss, into the folder +\merge_data, update the names of merged files, the name of output file, +and the versions in the file pathnames.txt, and run the Python script:

+

Python merge_data.py pathnames.txt

+

-          Check all the pathnames in the latest version match those +in DSM2 inp files.

+

Step 4: Gate operations update (under the folder \gateoperations)

+

1.      Delta Cross Channel

+

Download the gate operations from +https://www.usbr.gov/mp/cvo/vungvari/Ccgates.pdf

+

2.      Clifton Court gate

+

Ask Liu, Siqing (Siqing.Liu@water.ca.gov) from O&M. O&M collects the +inputs without QA/QC and updates DSM2 monthly, so their inputs can be +taken as the preliminary inputs.

+

3.      South Delta temporary barriers and Montezuma Slough gate

+

Go to +http://baydeltaoffice.water.ca.gov/sdb/tbp/web_pg/tempbsch.cfm, +or ask Michal Burn, South Delta Section to get the temporary barriers +gate operations, or ask O&M. The available values of the gate parameters +are listed in the file \gateoperations\barriers_values_03082012.xlsx. +The Vertical Datum in the Excel file is NGVD29, while that in the gate +operation DSS file is NAVD88. When the DSS file is updated, the Datum +difference must be accounted for.

+

4.      Tune the gate operation times

+

All the information of gate operations collected above are added in +\gateoperations\gate-v8-YYYYMM.dss. The collected gate operation +schedules are normally not the actual gate operations. After all the +inputs of DSM2 historical simulation are prepared, pre-run DSM2 HYDRO +and check if the simulated upstream and downstream stages/flows of each +gate match the observed stages/flows. If not, tune the gate operation +times until the simulated stage variations in time reflect the gate +operation schedules accurately. Table 2 is the list of CDEC stations to +check the gate operation schedules. CDEC stages or flows in the table +have been downloaded and saved in the file ancillary201712.dss in Step +2.  

+

Table 2 The CDEC stations to check gate operations

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
BarriersRKI or Channel NoCDEC station
Old River @ HeadUpstreamMossdaleMSD
DownstreamChannel 55, ROLD074 OH1
Old River at TracyUpstreamROLD047OAD
DownstreamROLD046OBD
Grant Line CanalUpstreamChannel 205DGL
DownstreamCHGRL009GLC
Middle RiverUpstreamRMID027MTB
DownstreamChannel 135No station
Delta Cross ChannelDownstreamChannel 365DLC
Montezuma SloughUpstreamCollinsville at Sac. RiverCSE
DownstreamRoaring RiverMSL
+

Attachments:

+

+plots_for_chapters.png +(image/png)
+ +MRZ_EC_input.png (image/png)
+ +sample_file_1.png (image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/Versions/index.html b/reference/Versions/index.html new file mode 100644 index 00000000..30279f66 --- /dev/null +++ b/reference/Versions/index.html @@ -0,0 +1,509 @@ + + + + + + + + + + + + + + + + + + Versions - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Versions

+

If grid/arc is changed (move/split/merge) at any boundaries of the +Delta,  CALSIM → DSM2 preprocess scripts (and DSM2 configuration) should +change, i.e. a different version. Some typical keywords are usually used +as part of the scenario version name:

+
    +
  • Existing (EST) as the current Delta condition (could be different as + time goes), No Action Alternative (NAA) as future scenarios without + major grid change, Proposed Alternative (PA) as future scenarios + with any major grid change (construction, etc)
  • +
  • Level of development (LOD, 2005, 2030 etc) represents Land Use + info, etc. Since censuses are not usually conducted very frequently, + different scenarios could use the same LOD.
  • +
  • Sea level rise projection (SLR, 15cm, 45cm etc) represent climate + change scenarios.
  • +
+

California Water Fix settings of Calsim and DSM2 (page 70, Table B-8) +could be referred as an example.

+

http://baydeltaconservationplan.com/Libraries/Dynamic_Document_Library/Final_EIR-EIS_Appendix_5A_-CALSIM_II_and_DSM2_Modeling_Simulations_and_Assumptions_Appendix-_Section_B.sflb.ashx

+

Other than the above, operation changes or constraints of standards +usually only affect CalSIM results, not DSM2 settings. Thus the DSM2 +preprocess and configuration don't need changes, i.e. we can just place +in the results. A practical routine is to use the same file +name/modifier/path for various scenarios within 1 version category +(modify folder name, or use a unique name for the post-process).

+

Some commonly used versions in the office are listed below.

+
    +
  • Original scripts package in DSM2/scripts or vista/scripts/dsm2. This + is the original version.
  • +
  • CH2M helped prepare CWF-related EST, NAA, PA, in combination with + LOD and SLR. The earlier 2 are widely used as templates in recent + years.
  • +
  • SWP Delivery Capability Report (DCR), Water Storage Investment + Program (WSIP)
  • +
  • Most widely used versions are in DSM2 v806 for now. Effort has been + made to update it to DSM2 v812 with 1 practice for EST. (Annual + Report 2017 Chapter 1)
  • +
  • One recent related practice is a new version for CALSIM3 to + DSM2 in SWP + Fingerprinting + study.
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/reference/index.html b/reference/index.html new file mode 100644 index 00000000..c275c8b9 --- /dev/null +++ b/reference/index.html @@ -0,0 +1,483 @@ + + + + + + + + + + + + + + + + + + Reference - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/releasenotes/index.html b/releasenotes/index.html new file mode 100644 index 00000000..9ee17599 --- /dev/null +++ b/releasenotes/index.html @@ -0,0 +1,479 @@ + + + + + + + + + + + + + + + + + + + + Release Notes - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Release Notes

+

Release Notes here

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/search/search_index.json b/search/search_index.json new file mode 100644 index 00000000..290492ae --- /dev/null +++ b/search/search_index.json @@ -0,0 +1 @@ +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"DSM2","text":""},{"location":"#overview","title":"Overview:","text":"

The Delta Simulation Model II (DSM2) is a one-dimensional mathematical model for dynamic simulation of one-dimensional hydrodynamics, water quality and particle tracking in a network of riverine or estuarine channels. DSM2 can calculate stages, flows, velocities, mass transport processes for conservative and non-conservative constituents including salts, water temperature, dissolved oxygen, and trihalomethane formation potential, and transport of individual particles. DSM2 thus provides a powerful simulation package for analysis of complex hydrodynamic, water quality, and ecological conditions in riverine and estuarine systems.

DSM2 currently consists of three modules, all of which come with the current distribution: HYDRO, QUAL, and PTM. HYDRO simulates one-dimensional hydrodynamics including flows, velocities, depth, and water surface elevations. HYDRO provides the flow input for QUAL and PTM. QUAL simulates one-dimensional fate and transport of conservative and non-conservative water quality constituents given a flow field simulated by HYDRO. PTM simulates pseudo 3-D transport of neutrally buoyant particles based on the flow field simulated by HYDRO. PTM has multiple applications ranging from visualization of flow patterns to simulation of discrete organisms such as fish eggs and larvae.

DSM2 is currently in version 8.2.2. Please send comments to Min Yu minyu@water.ca.gov.

"},{"location":"#for-more-information","title":"For more information","text":"

DSM2 Learning Series DSM2 source code GitHub repository

"},{"location":"DSM2/","title":"DSM2","text":""},{"location":"DSM2/#search-this-documentation","title":"Search this documentation","text":"

Search

"},{"location":"DSM2/#general-description","title":"General Description","text":"

DSM2\u00a0is a river, estuary, and land modeling system.

  • River\u00a0- Can simulate riverine systems, and has been extended from Sacramento to Shasta Dam. Also has been tested with high flow/stage simulations for flood modeling.
  • Estuary\u00a0- Completely flexible estuary model; stages and flows may be specified at boundary and internal points.
  • Land\u00a0- Includes effects from land-based processes, such as consumptive use and agricultural runoff.

DSM2 can calculate stages, flows, velocities; many mass transport processes, including salts, multiple non-conservative constituents, temperature, THM formation potential and individual particles.

The model is copyrighted by the State of California, Department of Water Resources. It is licensed under the GNU General Public License, version 2. This means it can be copied, distributed, and modified freely, but you may not restrict others in their ability to copy, distribute, and modify it. See the License for more details. Also notice the\u00a0list of protected routines.

"},{"location":"DSM2/#background-and-reference-information","title":"Background and Reference Information","text":""},{"location":"DSM2/#dsm2-model-documentation","title":"DSM2 Model Documentation","text":""},{"location":"DSM2/#dsm2-calibration-memo","title":"DSM2 Calibration Memo","text":""},{"location":"Delta_DSM2_Documentation/","title":"Delta DSM2 Documentation","text":"

DSM2 is mostly applied in the Sacramento-San Joaquin Delta. The specific information that pertains to the Delta DSM2 is documented here.

  • RKI Referenced Output
  • CALSIM - DSM2 Integration
  • Update DSM2 historical simulation
"},{"location":"dsm2_learning_series/","title":"DSM2 Learning Series","text":"

The DSM2 Learning Series is a series of hybrid live and online classes held by the Delta Modeling Section.

"},{"location":"dsm2_learning_series/#quick-start","title":"Quick Start","text":"

The DSM2 Quick Start Training was held in 2023. The slides shown in the training are available on our GitHub repository, and videos of the training are available on our Youtube playlist.

"},{"location":"dsm2_learning_series/#dsm2-planning-studies","title":"DSM2 Planning Studies","text":"

The DSM2 Planning studies training will be held in October 2023. The slides shown in the training will be available on our Github repository, and videos of the training will be available on our Youtube playlist.

"},{"location":"getting_started/","title":"Getting Started","text":"

Welcome to DSM2. This section of the documentation is intended to help you acquire and install dsm2, test that it is working, and understand the layout of the distribution. After you have taken these steps, you will probably want to tackle the tutorials in the /tutorials folder of the distribution or consult the documentation or grid map in the /documentation folder -- a link has been provided on the start menu to make the documentation easier to find.

"},{"location":"getting_started/#getting-dsm2","title":"Getting DSM2","text":"

DSM2 is distributed by the California Department of Water Resources Delta Modeling Section. You can find the model at the CNRA Open Data web site. Currently we distribute Windows executables, tutorials, source code, and a data package with templates for common studies.

"},{"location":"getting_started/#installing-dsm2","title":"Installing DSM2","text":"

DSM2 has been tested on Windows 10. DSM2 is distributed as a .zip file, which contains the model executables and input files. You should not unzip it to a location with spaces. We recommend D:\\delta\\dsm2 or C:\\delta\\dsm2. Unzip it to a drive with a lot (gigabytes) of room. This will make it easier to use in-place

"},{"location":"getting_started/#recommended-third-party-extras","title":"Recommended Third Party Extras","text":"

DSM2 comes with a numerical model and scripting capabilities. It is easier to use the model if you also have a text editor with syntax highlighting, a tool for differencing text files (\"diff tool\"), a DSS viewer and an hdf5 viewer.

Open command window here: Follow the instructions here to add the option 'Open command window here' to the Windows Explorer context menu. You will need administrative privileges to do this, and you should only do this if you are comfortable modifying the registry in Windows 10. This will allow you to open a command window by right clicking on a folder in Windows explorer. DSM2 models and Python scripts can be run in the command window. The tool is essential for working with DSM2 efficiently. Notepad++ is a text editor that works well with DSM2 input data and integrates nicely into the Windows file explorer. We support the editor with syntax highlighting. Here are some instructions for configuring Notepad++ DiffMerge is a good free differencing tool for text files. Beyond Compare is an inexpensive commercial product that is intuitive and also compares Word files. Vista, one of the first graphical tools for examining data in HEC-DSS format, comes with DSM2 in the /dsm2/vista/bin directory. HEC-DSSVUE is distributed by HEC and is actively maintained. Most people use DSSVUE as their primary tool with Vista for specific tasks. An Excel add-in for DSS data is also available on the HEC web page. HDFView and HDF-Explorer are two independent browsers for the HDF5 file format. This lets you look inside a tidefile, one of the outputs of the model. You only need one of them.

"},{"location":"getting_started/#test-launching-dsm2","title":"Test Launching DSM2","text":"

The first step is to see whether the installation was successful. To do this, get a DOS-style command prompt window and type from any location:

C:\\>hydro -v\nDSM2-Hydro 8.2.2  Git Version: 1468 Git GUI: 54a9cc3c\nUsage: Hydro input-file\n

If you got a message like the one above, you are up and running!

If instead you get this:

C:\\>hydro -v\n'hydro' is not recognized as an internal or external command,\noperable program or batch file.\n

...you have a path problem and we need to straighten it out.

Your next stop should be to read the Tutorials. The Basic Tutorials (Tutorial 1-6) feature most of the nuances of the model using a very simple grid and are an excellent way to learn about the model -- including subtleties that are new or have caused confusion in the past. The Delta Tutorial series are more applied -- tasks on the Delta. Doing some of Delta Tutorial #1 as a motivator, and then tackling the simple ones is a quick way to get a sense of the model.

"},{"location":"installation/","title":"Installation","text":"

How to install DSM2 for Windows

"},{"location":"overview/","title":"Documentation","text":""},{"location":"overview/#overview","title":"Overview:","text":"

Welcome to DSM2. This documentation is an ever-developing effort to get you started using the model. In learning DSM2 you will want to make use of the following resources

  • The tutorials, which will get you used to the user interface and running the model. There are two major groups of tutorials: one builds up a simplified model from the channels to boundaries to operating rules. Its instructions are very explicit. The second group of how-to tutorials focuses on large practical problems, examples of real studies on the Sacramento-San Joaquin Delta.
  • The documentation. This documentation explains how to use the graphical and text input of the model.
  • Fellow modelers and water managers.
"},{"location":"overview/#data-management","title":"Data management","text":"

Input data for DSM2 is managed using a combination of a relational database with a graphical interface for tabular input, HEC-DSS for time series storage and a small amount of text inputs for frequently-changing components of the model.

There is a section in this documentation for each input table. There is also a section for each type of text input. The two do not overlap, except for scalars (text is a last minute opportunity to change a scalar) and output (you can add output in text).

"},{"location":"overview/#user-interface","title":"User Interface","text":"

The DSM2 database interface is shown below. Important nomenclature is identified -- the terms in this figure are used frequently in this documentation.

"},{"location":"releasenotes/","title":"Release Notes","text":"

Release Notes here

"},{"location":"Table_of_Contents/","title":"Table of Contents","text":""},{"location":"Table_of_Contents/#contents","title":"Contents","text":"
  • Overview
  • Getting Started\u00a0
    • Download and installation
    • Recommended third party extras
    • Test Launching DSM2
  • Layers, Priority, Data Management
  • Operating Rules
  • Reference
    • Boundary Flow
    • Boundary Stage
    • Channels
    • Channel Initial Condition
    • Environment substitution (ENVVAR)
    • Gate
    • Group
    • Input Climate
    • Input Gate
    • Input Transfer Flow
    • IO Files Section (non-DSS)
    • Node Concentration
    • Operating Rule
    • Output: Channel (including source tracking)
    • Output: Gate
    • Output: Reservoir (including source tracking)
    • Particle Flux Output
    • Particle Group Output
    • Particle Insertion
    • Particle Filter
    • Particle Reservoir Filter
    • Rate Coefficient
    • Reservoir
    • Reservoir Concentration
    • Reservoir Initial Condition
    • Scalar
    • Source Flow (HYDRO at nodes)
    • Source Tracking (see Output: Reservoir and Channel)
    • Tidefile
    • Transfer
"},{"location":"build/CMake_Build/","title":"CMake Build","text":""},{"location":"build/CMake_Build/#introduction","title":"Introduction","text":"

DSM2 consists of many projects and third party libraries. This has meant running a number of scripts as documented in\u00a0CMake Build. It has also entailed opening multiple solution files and build libraries by hand. Furthermore, the compiler and visual studio upgrades were slow to adopt due to the cost of breaking changes when upgrading versions. And finally and most importantly, the daunting task of building all these files for a different OS such as linux.

"},{"location":"build/CMake_Build/#cmake","title":"CMake","text":"

cmake is a system that generates the build system. In other words, cmake does not build the libraries and projects itself, but encompasses the high-level, OS- and build-system-independent instructions for generating those systems. Its introduction and tutorial can be found at https://cmake.org/

A first effort at a cmake generated build system is working for VS2015 with the latest intel compiler on Windows. The instructions for this and the files needed have been checked into github master

  • A\u00a0CmakeLibraryMacro.txt is placed at DSM2 root path, with global macro and environment settings
  • A CmakeLists is placed under each project/sub-subject to govern its compilation.
  • build*.bat is created for DSM2, input_storage, oprule, respectively, to contain the key cmake commands (listed in the following sections).
  • After compilation, the exe/dll are generated under subfolders BUILD\\release or\u00a0BUILD\\debug.
"},{"location":"build/CMake_Build/#dsm2-core-project","title":"DSM2 core project","text":"
CMake Instructions\nCreate a build directory BUILD under dsm2\n\n\nmkdir BUILD\ncd BUILD\n\nFirst setup path \n\"C:\\Program Files (x86)\\IntelSWTools\\compilers_and_libraries\\windows\\bin\\compilevars.bat\" ia32 vs2015\n\nNext execute for VS2015 the cmake command\ncmake -G \"Visual Studio 14 2015\" ..\\src\n\nFinally open the DSM2.sln file in VS 2008 and compile\n\nor compile from command line with this command\ncmake --build . --target ALL_BUILD --config Debug\ncmake --build . --target ALL_BUILD --config Release\n
"},{"location":"build/CMake_Build/#input-storage-and-oprule","title":"Input Storage and Oprule","text":"

The libraries input_storage and oprule are built to support the DSM2 core project. (Confirm they build successfully before compiling the core project.)

cd input_storage\nmkdir BUILD\ncd BUILD\ncmake -G \"Visual Studio 14 2015\" ..\\\ncmake --build . --target ALL_BUILD --config Debug\ncmake --build . --target ALL_BUILD --config Release\n
cd oprule\nmkdir BUILD\ncd BUILD\ncmake -G \"Visual Studio 14 2015\" ..\\\ncmake --build . --target ALL_BUILD --config Debug\ncmake --build . --target ALL_BUILD --config Release\n
"},{"location":"build/CMake_Build/#third-party","title":"Third Party","text":"

DSM2 relies on the third-party libraries listed below. Usually DSM2 just uses the prebuilt libraries; however, sometimes (when the version/environment changes) the following libraries need to be rebuilt. Note that DSM2 only requires specific subsets of these libraries; these specifications can be found in CmakeLibraryMacro.txt

"},{"location":"build/CMake_Build/#boost","title":"boost","text":"

Run bootstrap.bat to build b2.exe; then run b2.exe

Note b2.exe is required to run in complete mode (the default minimal mode won't build the required library; run b2 --help for details)

Linux sample for boost build

#use this script to get started with new boost library\n#cp this script into new boost library source directory\n./bootstrap.sh --with-toolset=intel-linux\n./b2 --clean\n./b2 -a toolset=intel-linux link=static variant=release --with-filesystem --with-regex --with-system --with-test\n

Windows sample for boost build

./b2 -a runtime-link=static --with-filesystem --with-regex --with-system --with-test\n
"},{"location":"build/CMake_Build/#hdf5","title":"HDF5","text":"

Go to CMake-hdf5-1.8.20 folder to run batch of the relevant version, e.g. build-VS2015-32.bat, which builds HDF5-1.8.20-win32.zip

Unzip it and place it under third_party folder.

To build the static libraries, which is what the DSM2 static build needs, see HDF5 CMake Static Build

"},{"location":"build/CMake_Build/#heclib","title":"heclib","text":"

Build heclib\\windows_sources\\windows_build_MT_default_settings.bat

  • MT is for static version as we needed (MD for dynamic)
  • Make sure setting compiler path as the required version (as exemplified in core project)

For using Visual Studio 2017

Install Visual Studio 2017:

In addition to the standard Visual Studio 2017 installation, download individual components from the VS2017 installer.\u00a0 The individual components needed are:

Run the installer, and click on Modify, then Individual components tab, and check the components, finally click on Modify.

Change the lines in\u00a0build_dsm2_vs2015_32b.bat:

call \"C:\\Program Files (x86)\\IntelSWTools\\compilers_and_libraries\\windows\\bin\\compilevars.bat\" ia32 vs2015

to

call \"C:\\Program Files (x86)\\IntelSWTools\\compilers_and_libraries_2019\\windows\\bin\\compilervars.bat\" ia32 vs2017

cmake -G \"Visual Studio 14 2015\" ..\\src

to\u00a0

cmake -G \"Visual Studio 15 2017\" ..\\src

Rerun the build scripts as instructed above.

"},{"location":"build/CMake_Build/#attachments","title":"Attachments:","text":"

VS2017_individaul_components.jpg (image/jpeg) sum.PNG (image/png)

"},{"location":"build/Cloud_Notes/","title":"Cloud Notes","text":"

This page documents important notes for running DSM2 in the cloud.

DSM2 has been compiled on Ubuntu Linux and Windows. Only the 32-bit version has been compiled; the path to 64-bit conversion is much longer due to C/C++ code that needs cleaning up.

"},{"location":"build/Cloud_Notes/#linux","title":"Linux","text":"

DSM2 has been compiled and tested on Linux VMs running Red Hat 4.8 with kernel version 3.10. Static linking does not seem to work as documented and the dependencies on Intel fortran libraries are packaged in a lib/ subfolder.

AWS Linux version is Red Hat 7.3 running kernel 4.14.\u00a0 The following libraries are needed on top of the base image from AWS

#sudo yum upgrade #-- Do this to ensure installs of the below go through\nsudo yum install glibc.i686\nsudo yum install libgcc.i686\nsudo yum install libstdc++.i686\n
"},{"location":"build/Cloud_Notes/#windows","title":"Windows","text":"

Windows version is statically compiled and so the executables should work without any other dependencies.\u00a0

TODO: JRE version and installation\u00a0

"},{"location":"build/DSM2_v82_Compiling_and_Packaging/","title":"DSM2 v82 Compiling and Packaging","text":"

This memo presents step-by-step instruction on compiling and packaging DSM2 version 8 assuming a minor (bug-fix) release.\u00a0

"},{"location":"build/DSM2_v82_Compiling_and_Packaging/#version","title":"Version","text":"

The latest DSM2 release version is 8.2.0.\u00a0The first two digits form the DSM2 official version; the 3rd digit is used for minor (bug-fix) releases.

The git commit number is also embedded for the developer's record, and can be checked via the '-version' command.

The Git number is embedded automatically at compilation time. The following 3 files contain relevant version info: \u00a0\\dsm2\\src\\common\\version_generate.py \u00a0\\dsm2_distribute\\dsm2\\install\\copy_hydro_qual_ptmDLL_notepad_style.bat \u00a0\\dsm2_distribute\\dsm2\\install\\DSM2setup_Script_Generate.py

*BDO is using an 'unstable trunk' branching strategy in GIT for DSM2 development. *GIT categories: master is for major development and bug fix; branches are usually for separate and time-consuming studies on old versions; Tags are the records for all release versions. *DSM2 version tag (the 3rd digit in the version number) is designed to use odd number for internal use, and even number for formal release\u00a0

"},{"location":"build/DSM2_v82_Compiling_and_Packaging/#checkout","title":"Checkout","text":"

Checkout DSM2 from github: https://github.com/CADWRDeltaModeling/dsm2.git

place the project in local directory, e.g. D:\\dev\\dsm2_master\\

Checkout can be conducted at the command line

git clone https://github.com/CADWRDeltaModeling/dsm2.git\n

Or use software sourceTree (recommended for later use). Note which branch you're checking out (master by default).

"},{"location":"build/DSM2_v82_Compiling_and_Packaging/#compile","title":"Compile","text":"

Cmake is currently used to control and adapt project compilation (see details\u00a0at CMake Build).

From command line, run cmake batch at the project root path. It calls cmake build of oprule, input_storage, then DSM2 sequentially.

build_all.bat\n

The building results are exe of\u00a0hydro and qual, with dll of ptm, under path \\BUILD\\Release or Debug

The major part of the PTM module is written in the Java programming language and placed in the source code folder: \dsm2_v8_1\src\ptm. It can be built\u00a0in two ways:

  • ANT: the default compiling procedure is set in build.xml; open the command window and run 'ant' (build.xml as default running file) at current path. The release version of compiled application file will be automatically placed in the folder:\u00a0\\dsm2_v8_0\\src\\ptm\\lib (ptm.jar)
  • Eclipse: DSM2 v82 PTM Compiling with Eclipse
"},{"location":"build/DSM2_v82_Compiling_and_Packaging/#libraries","title":"Libraries","text":"

All the libraries DSM2 needs are precompiled and placed in the folder lib: input storage, oprule, and third parties

If compiling is required, refer their compile details\u00a0at CMake Build.

Since the third_party folder is very big and not easy to copy around, one way to share the same package (without increasing disk usage) is to use Windows mklink

Use admin right to open a command window and type in:

mklink /D {target location}\\third_party {original location}\\third_party

For internal users use this command

mklink /D third_party\u00a0\\cnrastore-bdo\\Delta_Mod\\Share\\DSM2\\compile_support\\third_party

"},{"location":"build/DSM2_v82_Compiling_and_Packaging/#debug","title":"Debug","text":"

CMake also generates the project's Visual Studio\u00a0solution, which can be used to debug the code.

See the following example for debug setting in Visual Studio 2015.

Make sure 'debug info' is enabled for the project you're working on (the current VS sets it to off by default).

"},{"location":"build/DSM2_v82_Compiling_and_Packaging/#packaging","title":"Packaging","text":"

Download from share folder and change folder name to dsm2_distribute\\

under branch (e.g. master), aside with dsm2

1. Generate tutorial PDF files:

a) Delete all PDF files in \"dsm2_distribute\\dsm2\\tutorials\\pdf\"

b) Run \"doc2pdf.vbs\" in \"\\dsm2_distribute\\dsm2\\install\\ to generate PDFs from tutorial word documents.

2. Copy compiled binaries to distribution folder:

a) Check the DSM2 version and paths to the compiled binaries are correct in the batch file \"copy_hydro_qual_ptmDLL_notepad_style.bat\" in the folder \\dsm2_distribute\\dsm2\\install\\

Be aware of the version consistency of the 3 control files mentioned in 'DSM2 Versioning'; if not consistent, correct them and re-compile from 6-4 in the previous section

b) Run the batch file \"copy_hydro_qual_ptmDLL_notepad_style.bat\".

Manually copy hydro.exe, qual.exe, ptm.dll, ptm.jar to the folder \u00a0\\dsm2_distribute\\dsm2\\bin

3. Generate packaging script:

a) Check the DSM2 version is correct in the Python script \"DSM2setup_Script_Generate.py\" in the folder \\dsm2_distribute\\dsm2\\install\\

Be aware of the version consistency of the 3 control files mentioned in 'DSM2 Versioning'; if not consistent, correct them and re-compile from 6-4 in the previous section

b) Run this Python script to generate Inno Setup script \"DSM2setup_v8.iss\".

4. Create DSM2 installation file:

Run \"DSM2setup_v8.iss\" with Inno Setup Compiler v5.2.3 The installation file named \"DSM2setup_8.X.Xrelease.XXXX.exe\" will be created in the same folder, \\dsm2_distribute\\dsm2\\install\\

5. Quick-test installer:

Test installation on a clean machine. Run historical hydro, qual_ec and ptm on study templates.

6. Tag and version increment:

Create release tag for both \"dsm2\" source code on github and \"dsm2_distribute\" folders on share-folder.\u00a0

For future usage,\u00a0immediately increment DSM2 version number (3rd digit to the next odd number) in the following three files:

  • \\dsm2\\src\\common\\version_generate.py \u00a0
  • \\dsm2_distribute\\dsm2\\install\\copy_hydro_qual_ptmDLL_notepad_style.bat \u00a0
  • \\dsm2_distribute\\dsm2\\install\\DSM2setup_Script_Generate.py
"},{"location":"build/DSM2_v82_Compiling_and_Packaging/#attachments","title":"Attachments:","text":"

debug_on.PNG (image/png) package1.PNG (image/png) debug1.png (image/png) debug.png (image/png) vers.PNG (image/png) lib.PNG (image/png) local1.PNG (image/png) github.PNG (image/png) srctree0.PNG (image/png) local0.PNG (image/png) image2017-6-13_15-47-59.png (image/png) image2017-6-13_15-47-43.png (image/png) image2017-6-13_15-47-35.png (image/png) image2017-6-13_15-47-17.png (image/png) image2017-6-13_15-46-58.png (image/png) worddav07c1c902559a15d9cb8d941d966322cb.png (image/png) worddavb0ec1d6cc7478dc4ec73bc27abb42880.png (image/png) worddav0037c3b8067a8dd0d52094029690277b.png (image/png) worddav490ef33751ab42acaa896e9bb7dc2dc7.png (image/png) worddav73b529f2e3382f4bb77f505185a10945.png (image/png) worddavc7e39738a4a8caa213b31d248d81f87b.png (image/png) worddavb58034debe15b3d2514f722580c782ad.png (image/png) worddav1c1324ef8177e0822bb62d9cdf8fdb05.png (image/png) worddava9503121f9ac50fd0060de1b95c6decc.png (image/png)

"},{"location":"build/DSM2_v82_PTM_Compiling_with_Eclipse/","title":"DSM2 v82 PTM Compiling with Eclipse","text":"

This memo presents step-by-step instruction on establishing editable project, compiling and packaging DSM2 (version 8) PTM module in Eclipse. Note this is the version for PTM source code editing purpose. The version for straight compiling is in the DSM2 compiling memo: DSM2_v8_0_Compiling_and_Packaging.

Required Software and Source Code Check out

  1. Java Development Kit http://java.sun.com/javase/downloads/index.jsp
  2. Eclipse-jee-galileo-win32 +http://www.eclipse.org/downloads/packages/release/galileo/sr2+
  3. PTM Java source code are checked out in the DSM2 Compile & Package document, with the whole DSM2 coding package:

The specific folder directory on DWR SVN server is: +http://dminfo.water.ca.gov:8686/svn/repository/models/branches/dsm2_v8_0/src/ptm/DWR/DMS/PTM+ and could be put at following directory on local computers: +D:\\delta\\models\\dsm2_v8_0\\src\\ptm+

Build up project Method 1: with Eclipse project description file File -> Import -> Existing Projects into Workspace -> Next -> Select root directory (Browse) to where .project resides -> Ok -> Finish

Method 2: w/o Eclipse project description file

  1. Set up workspace

Double click to open Eclipse and set up the workspace (default path as d:\\workspace); *This is the path for source code, libraries, compiled bytecode files, and et cetera. Source codes are copied from the location specified in the previous step. *Changes made in workspace could be compiled and tested independently, then copied back to the DSM2 model package folders, and finally committed to the SVN server.

  1. Create the PTM project in workspace

File->New->Java Project 'Project name' input e.g. ptm 'Project layout' select as 'Use project folder as root for sources and class files' (with src and bytecode stored at one place; the other option is also ok for use) Next->Libraries->Add External JARs->Select 3 jar files (COM.jar, edu.jar, xml.jar) at directory: D:\\delta\\models\\dsm2_v8_0\\src\\ptm\\lib Finish

  1. Create Package to include source codes:

Right click ptm in the Package Explorer->New->Package Input Package Name under Source folder ptm: DWR.DMS.PTM Import java source codes General->File System->Next From directory->Browse (DSM2 PTM Java src folders) ->OK Check PTM box on the left window (to include all the java files)->Finish *Eclipse would automatically create 2 packages for the sub-folders: DWR.DMS.PTM.behave DWR.DMS.PTM.tools *If some error msg like 'access restriction' comes out: Try to remove the JRE System library from Project ptm->Properties->Java Build Path->Libraries, and add it again Compile and debug PTM

  1. Compile source code to bytecode

Project->Build Project Bytecodes (.class) are stored at the same place as source codes (.java) Automatic compilation could be enabled by Project -> Build Automatically

  1. Run/Debug Variables Configuration

Set up the PTM input study file in IDE configuration Menu Run/Debug-> Run/Debug Configuration Java Application->New e.g. Main Tab: Name: MainPTM Main Class: DWR.DMS.PTM.MainPTM Argument Tab: Program arguments: historical_ptm.inp (the sample PTM input file in a DSM2 v8 historical study; make sure the hydro file has been run first and h5 file exists) VM arguments: -ss1m -mx512m -oss1m Other: D:\\delta\\dsm2_v8\\study_templates\\historical Environment Tab: DSM2_HOME d:\\delta\\dsm2_v8 Path d:\\delta\\dsm2_v8\\bin

  1. Run/Debug PTM

Switch to Debug Mode, which enable breakpoint setting and variable tracking Run->Run/Debug MainPTM for calculation checking

Export back to DSM2 package

  1. Combine the PTM Java bytecotes and related libraries into a JAR file

File->Export->Java->JAR file Check ptm box on the left to select all the bytecode files and libraries Select the export destination: D:\\workspace\\ptm\\ptm.jar

  1. Copy the JAR file to the DSM2 distribution binary folder for packaging:

+D:\\delta\\models\\dsm2_distribute\\dsm2\\ptm\\lib+ Other PTM related files, e.g. ptm.dll, are compiled in related C++ and Fortran projects of DSM2 Visual Studio solution. Please see the document for details.

"},{"location":"build/DSM2_v82_PTM_Compiling_with_Eclipse/#attachments","title":"Attachments:","text":"

worddav44179cfae725fe5f39c7b918214b4d68.png (image/png) worddav21d73a7e0e593bfd3b0107605d9a879c.png (image/png) worddave3944aea30dfda849398f8b2d35e5ca0.png (image/png) worddav166bb58986798c0c19da41ec64557bc5.png (image/png) worddav0e06297fcdac03d480d52d4303a8f664.png (image/png) worddav974c8dfc9bac21524c2e729c9ddf7938.png (image/png) worddav4acaef0242e743933e42a19aac4be497.png (image/png) worddava8b154265c7f02598531839e7c2691d2.png (image/png) worddav92a273d21de65a713f3c72c11cdd9d2f.png (image/png) worddavca883dcc4d073032cba8861c22f82023.png (image/png) worddav6787134635058c6f17e54d0d92dfbceb.png (image/png) worddava5ae1f4d3a3483dc4320496d098a0bc6.png (image/png) worddav7728359a07b9c4738d8f2fc9e1dcbe00.png (image/png) worddavba79a0a41e348cbfbb98c6c379e69d73.png (image/png) worddavaa8687d7165c7477fa2fe14970938e00.png (image/png) worddav0497cdcda22d3ff9a889885b64ed225a.png (image/png) worddav911bf0d4648403b4cd5c945075026a73.png (image/png) worddav7a2f929ee2f765ed2f7e2f8c617f4e9a.png (image/png) worddavd39e409d006ca7cd1fececf0ba72f5d5.png (image/png) worddaveec0e1cc149a8a2c30bf9d635c413987.png (image/png) worddav769c68dd95f5a8d049cd75682818d784.png (image/png)

"},{"location":"build/Debugging_JNI_code_with_Eclipse_and_Visual_Studio_20xx_/","title":"Debugging JNI code with Eclipse and Visual Studio (20xx)","text":"

This document shows how to setup an Eclipse project (e.g. PTM) with JNI (native C/C++/Fortran code) with Visual Studio (e.g. 2015)\u00a0

  1. Use 32 bit version of Eclipse (e.g.\u00a0eclipse-java-neon-2-win32) and setup PTM project

    1. Browse over to the checked out version of dsm2 and look under dsm2/src/ptm.\u00a0
    2. Create a debug configuration. Make sure to point to the directory where PTM.dll is built in debug mode. E.g. d:\\dev\\dsm2\\master\\dsm2\\BUILD\\Debug\\ is where cmake builds the Debug version of the projects
  2. Start debug from Eclipse and make sure to pause on some line of code before JNI code is invoked.\u00a0

  3. Use Visual Studio code and make sure to be in Debug configuration. Then attach to the running Java process in 2 using the remote attach to process

    You will need to be able to identify the process in 2 by its PID or its name.\u00a0

  4. Set breakpoint in native code

  5. Release the paused Java code in the Eclipse debugger. When the native code trigger is hit it will stop at the above breakpoint.

"},{"location":"build/Debugging_JNI_code_with_Eclipse_and_Visual_Studio_20xx_/#attachments","title":"Attachments:","text":"

image2020-11-10_13-26-14.png (image/png) image2020-11-10_13-24-53.png (image/png) image2020-11-10_13-22-32.png (image/png) image2020-11-10_13-16-30.png (image/png) image2020-11-10_13-14-39.png (image/png)

"},{"location":"build/Developer_and_Build/","title":"Developer and Build","text":""},{"location":"build/Developer_and_Build/#basic-steps","title":"Basic steps","text":"
  • Install prerequisite softwares

  • Checkout\u00a0DSM2 from github repository\u00a0

  • Compile and build input_storage and oprule libraries (may also need third party)

  • Compile and build DSM2 (That should result in the hydro.exe and qual.exe and ptm.dll in the build folders)

  • Test and validate the newly compiled

  • Copy and update DSM2 distribution package

  • Package for DSM2 new release

DSM2 version 82* compilation and packaging assuming a minor (bug-fix) release.\u00a0

  • DSM2 source codes and its relevant support party are placed on open source platform github (https://github.com/CADWRDeltaModeling/dsm2) for version control,
"},{"location":"build/Developer_and_Build/#required-software","title":"Required Software","text":"
  1. Visual Studio 2015 (check its installation and management details at\u00a0Intel Compiler Installation for Windows)
  2. Intel Composer for Fortran and C++ (Parallel Studio 2019)
  3. Cmake 3.14\u00a0https://cmake.org/\u00a0(better use a latest stable version, not *rc)
  4. Git https://git-scm.com/downloads
  5. Source Tree (free git client, optional) https://www.sourcetreeapp.com/
  6. Flex and Bison packages in Cygwin http://www.cygwin.com/setup.exe\u00a0 (make sure\u00a0{cywin}/bin in the environment path)
  7. Inno Setup Compiler v5.2.3 http://files.jrsoftware.org/is/5/isetup-5.2.3.exe
  8. Python 3 http://www.python.org/download/
  9. Java Development Kit\u00a0(32-bit)\u00a0 http://java.sun.com/javase/downloads/index.jsp
  10. Microsoft Office 2010
  11. Apache ANT http://ant.apache.org/bindownload.cgi

Note: make sure all software have their binary, header, or library folder set in the environment variables. (given that Department Virtual Machines may prohibit editing, users can still edit in their own account. Be aware of the software version which could fail some step of compiling)

"},{"location":"build/Developer_and_Build/#validation","title":"Validation","text":"

To test new compiled DSM2 and see its difference from older version, the following tools are often used:

  • DSM2-vista\u00a0Compare DSS Files Tool\u00a0requires output settings (pathnames) exactly the same, but provides a quick summary of accumulated difference, especially useful when we want to confirm if two versions are the same.\u00a0
  • DSM2-vista\u00a0Compare DSS Tool is flexible to compare between different pathnames, yet requires users set up configuration one-by-one.
  • HEC-DSSVue has a compare function in its 'Tools' menu
"},{"location":"build/Developer_and_Build/#attachments","title":"Attachments:","text":"

image2019-11-15_10-58-45.png (image/png) image2019-11-15_10-54-29.png (image/png) image2019-11-15_10-53-40.png (image/png) DSM2_v8_0_PTM_Compiling_eclipse.docx (application/vnd.openxmlformats-officedocument.wordprocessingml.document) DSM2_v8_0_Compiling_and_Packaging.docx (application/vnd.openxmlformats-officedocument.wordprocessingml.document)

"},{"location":"build/Docker_Builds/","title":"Docker Builds","text":"

Docker is a great way to package software for Linux and is essential for working on cloud platforms (AWS and Azure).

DSM2 docker build repo is here\u00a0http://dwrrhapp0179.ad.water.ca.gov/gitea/nsandhu/dsm2-docker

"},{"location":"build/HDF5_CMake_Static_Build/","title":"HDF5 CMake Static Build","text":"

DSM2 binaries are built with static links (no DLLs are needed). However HDF5 1.8.10+ does not support static builds as there are fundamental problems if parallel support is enabled. However DSM2 does not need the parallel support and static builds are very convenient for us.

The information here was documented in\u00a0 DSM2-117 - Update HDF5 library to 1.8.19 or later Done \u00a0and the kernel of information is included here for future HDF5 builds for static linking.

"},{"location":"build/HDF5_CMake_Static_Build/#this-blog-entry-explains-how-to-build-with-mt-flag","title":"This blog entry explains how to build with /MT flag","text":"

https://blog.afach.de/?page_id=421

"},{"location":"build/HDF5_CMake_Static_Build/#hdf5-static-with-mt-flag-compilation-auto-compile-script-visual-studio","title":"HDF5 Static (with /MT flag) compilation Auto compile script \u2013 Visual Studio","text":"

This is a compile script that compiles HDF5 libraries from source statically with multithread support, i.e., \u201c/MT\u201d flag in Visual Studio. automatically.

"},{"location":"build/HDF5_CMake_Static_Build/#warning","title":"Warning","text":"

After discussing with one of the programmers\u00a0of HDF5, it was made clear that\u00a0linking statically works safely only when the HDF5 library\u00a0wasn\u2019t compiled with parallel support.

"},{"location":"build/HDF5_CMake_Static_Build/#the-script","title":"The script","text":"

The script involves going to the file config\\cmake\\UserMacros\\Windows_MT.cmake and copying the file\u2019s contents to \u201cUserMacros.cmake\u201d. The same is also done for ZLib and SZip after extracting them, and rezipping them again.

@echo off\n::The following is the name of the folder of HDF5 source\nset \"hdffolder=hdf5-1.8.16\"\n\n::add a new line then add /MT compilation options\ncall echo & echo. >> %hdffolder%\\UserMacros.cmake\ncat %hdffolder%\\config\\cmake\\UserMacros\\Windows_MT.cmake >> %hdffolder%\\UserMacros.cmake\nfor %%i in (%hdffolder%\\UserMacros.cmake) do sed -i \"s/\\\"Build With Static CRT Libraries\\\" OFF/\\\"Build With Static CRT Libraries\\\" ON/g\" %%i\n\n::add a new line then add /MT to SZip after extracting it, and then recompress it\ngzip -dc SZip.tar.gz | tar -xf -\nmv SZip.tar.gz SZip-dynamic.tar.gz\ncall echo & echo. >> UserMacros.cmake\ncat SZip\\config\\cmake\\UserMacros\\Windows_MT.cmake >>SZip\\UserMacros.cmake\nfor %%i in (SZip\\UserMacros.cmake) do sed -i \"s/\\\"Build With Static CRT Libraries\\\" OFF/\\\"Build With Static CRT Libraries\\\" ON/g\" %%i\ntar cf SZip.tar SZip\\\ngzip SZip.tar\nrm -r SZip\n\n::do the same to ZLib\ngzip -dc ZLib.tar.gz | tar -xf -\nmv ZLib.tar.gz ZLib-dynamic.tar.gz\ncall echo & echo. >> UserMacros.cmake\ncat ZLib\\config\\cmake\\UserMacros\\Windows_MT.cmake >>ZLib\\UserMacros.cmake\nfor %%i in (ZLib\\UserMacros.cmake) do sed -i \"s/\\\"Build With Static CRT Libraries\\\" OFF/\\\"Build With Static CRT Libraries\\\" ON/g\" %%i\ntar cf ZLib.tar ZLib\\\ngzip ZLib.tar\nrm -r ZLib\n\nbuild-VS2013-32.bat\n
"},{"location":"build/HDF5_CMake_Static_Build/#requirements","title":"Requirements","text":"

1-\u00a0CMake\u00a0(add its executable folder to path) 2-\u00a0GOW (https://github.com/bmatzelle/gow/downloads) 3-\u00a0Visual Studio or C++ Express (https://www.visualstudio.com/en-us/products/visual-studio-express-vs.aspx)\u00a0(this you can get for free from Microsoft, but I assume you know enough about this already since you\u2019re here)

Note: If CMake won\u2019t show in path in command prompt, run prompt as administrator, or use this command to add the path you want to the environment variable %PATH% \u00a0set PATH=C:\\Program Files (x86)\\CMake\\bin;%PATH%

Gow is GNU tools for windows, like tar, gzip and sed. These are important for the script.

Whether you\u2019d like to have a 32-bit or 64-bit version of visual studio used depends on the environment variables that are defined.\u00a0The easiest way is to run the command prompt for the version you want. For example, in Visual Studio 2013, if one goes to Start, then types in quick search \u201cVisual\u201d, you\u2019ll find a folder called \u201cVisual Studio Tools\u201d. This folder will have both command prompts with the relevant environment variables. The following shows this folder:

"},{"location":"build/HDF5_CMake_Static_Build/#prepare-to-run-the-script","title":"Prepare to run the script","text":"

Go to\u00a0this page, and download the HDF5 source. Extract it; put the script in a file there; if the version you want to compile is different than the one in the script, modify the folder name; and finally run the script. After the script is finished, you\u2019ll have a compressed zip file with compiled source and an installer executable.

The file HDF5CompileScript.bat is where I copied the script of compile that I created. Just run this script through the command prompt of visual studio and it\u2019ll compile.

"},{"location":"build/PTM_DLL_Static_Build/","title":"PTM DLL Static Build","text":"

On windows it is not straightforward to compile to a DLL by mixing /MT (static) and /MD (dynamic) linking flags. However we want to create exes (hydro, qual, gtm) that have no dependencies on system libraries at runtime. This means we compile all libraries with /MT flags.

DSM2-174 - PTM DLL compiling issue Resolved

PTM is a dll so we have to override the libraries the compiler and linker search for by default.

For the standard C/C++ libraries more information at\u00a0https://docs.microsoft.com/en-us/cpp/c-runtime-library/crt-library-features?view=vs-2019

For the Intel libraries more information at\u00a0https://software.intel.com/en-us/articles/libraries-provided-by-intelr-c-compiler-for-windows-and-intel-parallel-composer

The following libraries are then ignored so that the static versions of the libraries are packaged into the .dll file itself. This increases the size of the DLL but then during runtime there are no other dependencies on any system or intel libraries. In other words, it can stand alone and run.

The libraries ignored are\u00a0

msvcrt.lib;libmmd.lib; msvcprt.lib;libucrtd.lib;\n

If you ever need to see all the libraries being used turn the /VERBOSE feature on the linker options in Visual Studio

"},{"location":"build/PTM_DLL_Static_Build/#attachments","title":"Attachments:","text":"

image2019-4-29_12-54-20.png (image/png)

"},{"location":"build/Troubleshooting/","title":"Troubleshooting","text":"
  • DSM2 inputs are off by 1DAY
  • DSM2 Seems to indicate missing irregular (gate position) data
"},{"location":"build/Troubleshooting/#attachments","title":"Attachments:","text":"

image2020-1-15_8-29-2.png (image/png) image2020-1-15_8-21-6.png (image/png) image2020-1-15_8-20-52.png (image/png) image2020-1-15_8-19-39.png (image/png) image2020-1-15_8-19-11.png (image/png) image2020-1-15_8-18-51.png (image/png) image2020-1-15_8-17-37.png (image/png) image2020-1-15_8-17-16.png (image/png) image2020-1-15_8-16-44.png (image/png)

"},{"location":"build/Troubleshooting_Eclipse/","title":"Troubleshooting Eclipse","text":"

Eclipse Tips

"},{"location":"calibration/Calibration_Memo/","title":"Calibration Memo","text":"

Links needed here to the Calibration Memo

"},{"location":"calibration/Calibration_Memo/#attachments","title":"Attachments:","text":"

worddav6ba4ded1dc3ad6637d41e1cab3b0ac67.png (image/png) worddav31e2094a0a30cbe2b2f68fd01ded456e.png (image/png) worddav87fd53a098c286a8a765388c0fad3872.png (image/png) worddav858bf66748366e3ac5293838851bbe82.png (image/png) worddavadc677a457af87d9f187accca7e45d26.png (image/png) worddavd77556e825a07cebeb566d19d30ea639.png (image/png) worddav3935cdcf6dfd7bee0e4e0e6f048bb4b6.png (image/png) worddav2a890e9699b30d8faddba3233e1b781e.png (image/png) worddavecaeebb6ecfc62b86e1550341e82617a.png (image/png) worddav5b932002973e34737ab05aec46278f41.png (image/png) worddavc5d792e811d89f630b2b3cb9afddbdf8.png (image/png) worddav867f30829ec80c80af38c4e36e1035ec.png (image/png) worddav9be8d4cf703e37b5d1723218d05f472b.png (image/png) worddavb1c809d7f14b67e2012575da1956af47.png (image/png) worddavd6a8df921a23a973f133ab348abec003.png (image/png) worddavf6bb9ddadcecd2bfc8abe14f18e5a7cc.png (image/png) worddav338764269ad059ddfcaf2d744b740015.png (image/png) worddavb3e65a876c6772d6a9ad05aca7b33c01.png (image/png) worddav4cacbb121a7846293e65de267a4f33a8.png (image/png) worddav98e5298510a8f5160c95d49e5f7f7bad.png (image/png) worddav3d94a048a0fda9ef690959da7c503a0d.png (image/png) worddav99cfce7a2d97c6e99e26dbcecb6c889b.png (image/png) worddav97f65dbaf0d433fd0b85fb28edce59d2.png (image/png) notes for calibration refine.txt (text/plain) Memo_DSM2_V8.1Beta_Calibration.docx (application/vnd.openxmlformats-officedocument.wordprocessingml.document) Hydro_Calibration_Stations.jpg (image/jpeg) Hydro_calibration_notes.txt (text/plain) flow_2009.pdf (application/pdf) flow_2008.pdf (application/pdf) flow_2007.pdf (application/pdf) flow_2002.pdf (application/pdf) EC_calibration_notes.txt (text/plain) EC_Calib_station_2001_2008.jpg (image/jpeg) EC.pdf (application/pdf) stage_2009.pdf (application/pdf) stage_2008.pdf (application/pdf) stage_2007.pdf (application/pdf) Stage_2002.pdf (application/pdf)

"},{"location":"calibration/Mini_Calibration_2009_/","title":"Mini Calibration (2009)","text":""},{"location":"calibration/Mini_Calibration_2009_/#attachments","title":"Attachments:","text":"

DSM2_Recalibration_102709_doc.pdf (application/pdf)

"},{"location":"faqs/","title":"DSM2 FAQ","text":"
  • How does DSM2-Hydro calculate channel volume?
"},{"location":"faqs/#how-to-articles","title":"How To articles","text":"
  • How to read hdf5 output files
  • Delta Tutorial 9 - DO Simulation
  • Delta Tutorial 8 - Temperature Simulation
"},{"location":"faqs/DSM2_-_How_to_read_hdf5_output_files/","title":"DSM2 - How to read hdf5 output files","text":"

DSM2 writes output in HDF5 format. This format can be read by Vista and vscript\u00a0

"},{"location":"faqs/DSM2_-_How_to_read_hdf5_output_files/#step-by-step-guide","title":"Step-by-step guide","text":"

To open a HDF5 file

  1. Open Vista\u00a0
  2. Drag and drop or use Session > Open > Tidefile from the menu options
  3. Select the data items needed and use the\u00a0 Data > Export > Export Data To DSS menu item to export the Data to DSS files

Video of How to read DSM2 - HDF5 files using VISTA

Alternatively here is a snippet of vscript code that does something similar

Getting average concentrations from Qual HDF5 file

from vtidefile import opentidefile\nfrom vdss import writedss\nfrom vutils import *\nimport vdisplay\nfrom vdisplay import plot\nimport sys\nimport string\n\ndef get_avg_conc(tidefile, chan, twstr):\n    tf=opentidefile(tidefile)\n    if twstr != None:\n        print 'Timewindow: %s'%twstr\n        tw=timewindow(twstr)\n    else:\n        tw=None\n    refs=tf.find(['','^%s$'%chan,'AVG CONC'])\n    if refs and len(refs)==1:\n        print \"Getting data %s\"%(str(chan))\n        if tw!=None:\n            ref=DataReference.create(refs[0],tw)\n        else:\n            ref=refs[0]\n        return ref.data\n    else:\n        raise \"No data found for %s in file %s\"%(chan, tidefile)\nif __name__ == '__main__':\n    if len(sys.argv) != 2:\n        print \"Usage: vscript \n    tidefile=sys.argv[1]\n    twstr=\"01JUL2014 0000 - 01AUG2014 0000\"\n    chans=[291,290,436,435,434,433]\n    chan_concs=[]\n    for chan in chans:\n        chan_concs.append(get_avg_conc(tidefile, chan, twstr))\n\n    for conc in chan_concs:\n        plot(conc)\n

DSM2-vista also supports export data to hec-dss format (One or multiple timeseries path could be selected) See the following menu option as example.\u00a0

"},{"location":"faqs/DSM2_Seems_to_indicate_missing_irregular_gate_position_data/","title":"DSM2 Seems to indicate missing irregular (gate position) data","text":""},{"location":"faqs/DSM2_Seems_to_indicate_missing_irregular_gate_position_data/#problem-hydro-displays-the-following","title":"Problem: Hydro displays the following.","text":"

Error in reading time-varying data: Current time is 01SEP2001 2400; earliest data time for /HIST+GATE/MTZSL/BOATLOCK_OP//IR-DECADE/DWR-ESO/ is\u00a0

Brad Tom\u00a0Related to\u00a0Jira issue. I think we should open an issue there as you are doing the practical fix for this known issue\u00a0

Key Summary T Created Updated Due Assignee Reporter P Status Resolution DSM2-106 Timeseries interpolation [<img src=\"http://msb-jira/secure/viewavatar?size=xsmall&avatarId=10303&avatarType=issuetype\" class=\"icon\" alt=\"Bug\" />](http://msb-jira/browse/DSM2-106?src=confmacro) Nov 30, 2011 Jan 03, 2022 Nicky Sandhu Ines Ferreira <img src=\"http://msb-jira/images/icons/priorities/medium.svg\" class=\"icon\" alt=\"Medium\" /> Resolved Won't Do

1 issue

But there are data values in this time series with dates before the current date.

The problem is: When using an IR-DECADE dss path, there must be a value with a timestamp that is at the beginning of the current decade. In this case, a record is required that has a timestamp of 31DEC2000 2400.

"},{"location":"faqs/DSM2_Seems_to_indicate_missing_irregular_gate_position_data/#to-fix-this","title":"To fix this:","text":"
  1. Tabulate the data in HEC-DssVue. There is no beginning of decade timestamp.

2. Turn on \"Allow Editing\"

3. Select the row before the end of the previous decade, and select \"Insert Rows\":

4. Change \"Number Rows\" to 1.

5. Enter the timestamp that is needed, with a value equal to the value in the previous record:\u00a0

6. Save the data:

"},{"location":"faqs/DSM2_Seems_to_indicate_missing_irregular_gate_position_data/#attachments","title":"Attachments:","text":"

image2020-3-25_11-0-58.png (image/png) image2020-3-25_10-39-44.png (image/png) image2020-3-25_10-39-7.png (image/png) image2020-3-25_10-38-32.png (image/png) image2020-3-25_10-38-16.png (image/png) image2020-3-25_10-37-42.png (image/png) image2020-3-25_10-36-6.png (image/png)

"},{"location":"faqs/DSM2_inputs_are_off_by_1DAY/","title":"DSM2 inputs are off by 1DAY","text":"

DSM2-241 - Check daily inflows and warn if INST-VAL Open

A problem was reported with a run that was supposed to be a historical run with inflows scaled up by a factor.\u00a0

  1. Here's a plot of model output at Vernalis
  • Blue=historical stage output
  • Red=scaled up stage output
  • Green=historical flow output
  • Black=scaled up flow output

On February 14, 1992, both the scaled up flow and stage outputs are higher than historical.

2. The problem was caused by the \"Type\" of the scaled up flow input time series. The user had created a series with a type of INST-VAL.

This results in changes in inflow taking effect at the end of the day rather than the beginning of the day.

One way to tell that a 1DAY time series is PER-AVER vs INST-VAL is that HEC-DSSVue plots PER-AVER time series as a square wave, but not INST-VAL.

3. To check Type and to change it, in HEC-DSSVue, right click on the series and select Edit, and\u00a0

use the dropdown to select a new type.

4. After re-running with the change, results are as expected.

"},{"location":"faqs/DSM2_inputs_are_off_by_1DAY/#attachments","title":"Attachments:","text":"

image2020-1-15_8-19-11.png (image/png) image2020-1-15_8-20-52.png (image/png) image2020-1-15_8-21-6.png (image/png) image2020-1-15_8-29-2.png (image/png) image2020-1-15_8-19-39.png (image/png)

"},{"location":"faqs/Generating_DSM2_Hydro_boundary_conditions_from_the_DCR_2017_CALSIM_II_output/","title":"Generating DSM2 Hydro boundary conditions from the DCR 2017 CALSIM II output","text":"

run DCR 2017 with CWF script.zip

I tried to generate the\u00a0DSM2 Hydro boundary conditions\u00a0from the DCR 2017 CALSIM II output (with a 2020 development level).\u00a0 But for this CALSIM II output, we don\u2019t have a corresponding script that can be used to generate the boundary conditions. The closest script available is from CALWATERFix for the previous version of CALSIM II output (with a 2005 development level).\u00a0 So I used this script.\u00a0 After some basic edits (e.g., change directory, file name, etc.),\u00a0 the script ran and generated the boundary conditions.\u00a0 However, because the script and the CALSIM II output are not paired, using the old script for the new output could introduce errors.\u00a0 I compared the boundary conditions generated from DCR 2017\u00a0 with those for CALWATERFix (generated with the same script but from an older version of CALSIM output).\u00a0 The patterns match but there are noticeable differences at some spots (e.g., Aug 24 1994 or July 19 2001).\u00a0 I haven't figured out what exactly caused the differences, but it could be due to the different assumptions used in the CALSIM II studies.\u00a0\u00a0

I have a read me file inside of the zip file to provide the instruction about how to run the script.\u00a0 The zip file is too big to upload so I left two files out.\u00a0 You can download the left out files here:

  1. this file should be in .\\run DCR 2017 with CWF script\\timeseries\u00a0Planning_Tide_82years.zip
  2. this file should be in .\\run DCR 2017 with CWF script\\studies\\planning\\timeseries\\CALSIM2020D09EDV__2017DCR_OldANN_NewWSIDI-SWPDemand_x64_20171115.zip
"},{"location":"faqs/Generating_DSM2_Hydro_boundary_conditions_from_the_DCR_2017_CALSIM_II_output/#attachments","title":"Attachments:","text":"

2020D09EDV__2017DCR_OldANN_NewWSIDI-SWPDemand_x64_20171115.zip (application/zip) Planning_Tide_82years.zip (application/zip) run DCR 2017 with CWF script.zip (application/zip)

"},{"location":"faqs/How-to_articles/","title":"How-to articles","text":"

Add how-to article

Title Creator Modified DSM2 - How to read hdf5 output files Nicky Sandhu Jan 27, 2021 Delta Tutorial 9 - DO Simulation Nicky Sandhu Jan 27, 2021 Delta Tutorial 8 - Temperature Simulation Nicky Sandhu Jan 27, 2021"},{"location":"faqs/How_does_DSM2-Hydro_calculate_channel_volume_/","title":"How does DSM2-Hydro calculate channel volume?","text":"
  • Hydro creates virtual cross-sections by interpolating cross-section input (see Tutorial 1: Channels) to create virtual cross-sections. Virtual cross-sections are created and used internally in Hydro by interpolating cross-section input.
  • Virtual cross-sections are usually not seen by the user. If the variable printlevel >= 5 in the SCALAR input section, virtual cross-sections will be written to output .hof file.
  • The 2012 Annual Report describes a change in the way volume is calculated: it used to use only the area of the cross-section in the middle of a computational reach, but now it uses all 3 of the cross-sections in a computational reach. Also, it describes an important change to the longitudinal interpolation used to create virtual cross-sections.
  • The 2016 Annual Report, section 3.4.2 indicates that the volume of a channel is calculated by multiplying the average of two cross-sectional areas by the distance between them. This process would then be repeated twice for each computational reach to find the volume.
  • Hydro will not converge well if cross-sectional area is not interpolated correctly. Previously, area at a given elevation between cross-section layers was calculated by interpolating area linearly between two layers. It has been changed to a = a1+(.5*(w1+w2))*h, where
    • a1 = area at lower elevation
    • w1 = width at lower elevation
    • w2 = width at higher elevation
    • h = distance from lower elevation to given elevation
"},{"location":"faqs/How_does_DSM2-Hydro_calculate_channel_volume_/#references","title":"References","text":"

Annual reports can be found here.

Ferreira I. and Sandhu, N. 2016 \"Chapter 3: DSM2 Extension: A GIS-Based Approach.\"\u00a0 In: Methodology for Flow and Salinity Estimates in the Sacramento-San Joaquin Delta and Suisun Marsh. 37th Annual Progress Report to the State Water Resources Control Board. California Department of Water Resources.

Liu L., Ateljevich E., and Sandhu P. 2012. \u201cChapter 2: Improved Geometry Interpolation in DSM2-Hydro.\u201d In: Methodology for Flow and Salinity Estimates in the Sacramento-San Joaquin Delta and Suisun Marsh. 33rd Annual Progress Report to the State Water Resources Control Board. California Department of Water Resources.

Tom B. 1998. \u201cChapter 6: Cross-Section Development Program.\u201d In: Methodology for Flow and Salinity Estimates in the Sacramento-San Joaquin Delta and Suisun Marsh. 19th Annual Progress Report to the State Water Resources Control Board. California Department of Water Resources.

"},{"location":"faqs/PTM_Frequently_Asked_Questions_FAQ_/","title":"PTM Frequently Asked Questions (FAQ)","text":"

Moving from ptm source directory in github. This should belong in the documentation and not buried in source code

"},{"location":"faqs/PTM_Frequently_Asked_Questions_FAQ_/#q1-what-is-ptm","title":"Q1. What is PTM?","text":"

PTM is Particle Tracking Model. It is written partly in Java and partly in Fortran. I/O is mainly handled by Fortran.

"},{"location":"faqs/PTM_Frequently_Asked_Questions_FAQ_/#q2-what-are-the-inputs-to-ptm","title":"Q2. What are the inputs to PTM?","text":"

1. Hydrodynamic information: This is the dynamic information about flow, flow-area, (therefore velocity) and depth. This information comes from the tidefile which is generated by hydro. One has to make sure that in addition to the tidefile the correct network configuration is being used.\u00a0 This information is typically done by the following io structure.

TIDEFILE START_DATE START_TIME END_DATE END_TIME FILENAME generic none length none tidefile.out END

2. Network configuration: This defines how channels and reservoirs are linked up and what their characteristics such as x-section, length, etcetera are. This also is read by Fortran. - channels.inp, xsects.inp, junctions.inp, reservoirs.inp, translations.inp

Refer to DSM2 docs

3. Particle information:

a. Type of particle: Until now we have been dealing only with neutrally-buoyant particles or particles with a certain falling velocity. For other kinds of particles such as fish, no IO has been decided.

b. Particle insertion information: Number of particles, time of insertion, location of insertion and duration of insertion. Refer to ptm_insertion.inp.

PARTINP [NODE NPARTS SDATE STIME EDATE ETIME] | 44 500 01jan1990 0133 05jan1990 0333

This means: insert 500 particles at node 44, evenly distributed from start time to end time

[NODE NPARTS SDATE LENGTH] 44 600 01jan1990 5days END

c. Run time information

This is similar to run time settings for hydro and qual. Refer to DSM2 docs.

d. PTM has the following scalars

SCALAR ptm_time_step 15min # PTM time step display_intvl 1hour # how often to display run progress ptm_ivert t # Use Vertical velocity profile ptm_itrans t # Use Transverse velocity profile ptm_iey t # Use transverse mixing ptm_iez t # Use vertical mixing ptm_fallvel 0.05 # settling velocity in ft/s ptm_random_seed 32001 # Starting Random Number Seed ptm_trans_constant 0.06 # Transverse Mixing Constant ptm_vert_constant 0.0067 # Vertical Mixing Constant END

d. IO from PTM

IO_FILES MODEL TYPE IO INTERVAL FILENAME ptm anim out 15min anim.bin # animation file ptm trace out none trace.bin # trace file ptm restart out 6hours restart.out # restart output file ptm restart in none restart.inp # restart input file END

Animation file: Contains the data for the first 100 particles movement every time interval as specified. This is a binary file if the file name does not end in \".out\" else it will be an ascii file. One can use PTM Animator to run the binary file to look at the animation visually

Trace file: The trace file contains the trace of every particle in the system. It records the entrance/exit of a particle into a waterbody such as a channel etcetera. \".out\" for ascii file. The trace file is used to calculate the flux and so the flux may be calculated after the PTM run.

Restart file: This is a snapshot of the current locations of every particle in the system. Useful mainly for restarting a run from a previously saved state.

Flux information: PARTICLE_FLUX FROM_WB TO_WB INTERVAL FILENAME B_PART chan, 216 | qext, cvp | 15min flux.txt past_CVP res,clfct | qext,swp | 15min flux.txt past_SWP chan,436,53 | chan,442,437 | 15min flux.txt past_Chipps chan, 441 | stage, mtz | 15min flux.txt past_MTZ | qext,div,-cvp, -ccc | 15min flux.txt Ag_Diversions | qext,div | 15min flux.txt All_Diversions END

This instructs ptm to calculate flux from the trace file. This can be done at the end of a ptm run or from a trace file later. It's the user's responsibility to provide the correct network configuration. The filename flux.txt means the user wants the output in ascii format else it would be flux.dss which is output in dss format. The DSS pathnames B part == B_PART from the above table.

The interval at which the flux is calculated is as given above in the

INTERVAL column

The particle flux is calculated from a waterbody to a waterbody. A waterbody is specified by a type string followed by an identifier. The type string is one of chan, res, qext, stage.

The identifier is either a number for the chan or a name as defined in the translations.inp file. If a generic type follows in place of the identifier then the flux tracks all particles entering or leaving that particular type of waterbody. If no from_wb or to_wb is defined it is assumed that it is the same as to all waterbodies.

Particle dynamics:

The particle can move in x, y and z directions. However as DSM2 is a 1-D model this information is gleaned by applying a vertical and transverse velocity profile to the average velocity available from the tidefile.

A particle has a deterministic and random component to its movement.

Deterministic component == Average velocity + transformations

"},{"location":"gis/CSDP_Network_File_Format/","title":"CSDP Network File Format","text":"

The CSDP network file stores centerlines, cross-section lines, and cross-section points. The CSDP uses the information in the file to create cross-section input for DSM2.

The format of the CSDP Network file predates the CSDP. It was created by the consultant John Crapuchettes, who created the Bathymetry Data Display (BDD) application, which was the predecessor to the CSDP.

See comments for explanations of the various lines in the file.

;HorizontalDatum: UTMNAD83 ;HorizontalZone: 10 ;HorizontalUnits: Meters ;VerticalDatum: NAVD88 ;VerticalUnits: USSurveyFeet ;Filetype: network ;NumElements: 525 \"1\" 18 2140064.75,1.3689134E7 2139796.0,1.3689089E7 2139320.0,1.3689424E7 2139205.25,1.3689698E7 2139663.75,1.3690571E7 2140078.25,1.3690641E7 2140395.75,1.369087E7 2140713.0,1.3691585E7 2140713.0,1.3691928E7 2140589.75,1.3692493E7 2139928.25,1.3693401E7 2139275.75,1.3693904E7 2139240.5,1.3694195E7 2139390.5,1.3694503E7 2139822.5,1.3695402E7 2139954.75,1.3695905E7 2140422.0,1.369669E7 2140480.5,1.3696834E7 3 \"\" 8 -215.90325927734375,20.427509307861328 -157.9310302734375,12.60617733001709 -54.482757568359375,6.409266471862793 92.41378784179688,-3.2046332359313965 191.72413635253906,1.6023166179656982 315.862060546875,4.247311592102051 346.89654541015625,11.737451553344727 458.6206970214844,20.54054069519043 117.23826599121094 890.1288452148438 \"BT 8/12/2019: cloned from adjacent cross-section to prevent interpolation to improve max area ratio\" \"\" 8 -355.90325927734375,20.427509307861328 -297.9310302734375,12.60617733001709 -194.48275756835938,6.409266471862793 -47.58620834350586,-3.2046332359313965 51.72413635253906,1.6023166179656982 175.86207580566406,4.247311592102051 206.89654541015625,11.737451553344727 318.6206970214844,20.54054069519043 662.5869750976562 903.2123413085938 \"KH,1/30/2019: moved the centerline to better line up with the most recent survey data; re-created the cross-sections\" \"\" 8 -385.9397277832031,32.054264068603516 -220.38327026367188,14.89919376373291 -121.08013916015625,6.431451797485352 -27.00348472595215,1.6935484409332275 54.0069694519043,-2.036290407180786 150.69686889648438,-0.32258063554763794 218.64111328125,19.435483932495117 341.4634094238281,34.15322494506836 9270.8427734375 1192.212646484375 \"KH,1/30/2019: moved the centerline to better line up with the most recent survey data; re-created the cross-sections *nl* *nl* BT 7/24/2019: adjusted to prevent drying 
up\"

\"2\" 11 2140424.0,1.3696842E7 2141014.75,1.3698247E7 2141700.5,1.369945E7 2143323.0,1.3700658E7 2143420.0,1.3701319E7 2142970.25,1.3701777E7 2141859.25,1.3701848E7 2140686.5,1.3701619E7 2139679.75,1.3701361E7

"},{"location":"gis/CSDP_Network_Summary_Report/","title":"CSDP Network Summary Report","text":"

The CSDP Network Summary Report is created by the CSDP. It can be used to help identify issues and potential issues with cross-sections in the currently loaded network file.

It also contains important comparisons of DSM2 Virtual Cross-Section volume with GIS calculated volumes.

A network summary report uses the following input files:

  1. An existing channels.inp file (such as channel_std_delta_grid_NAVD_20150129.inp). This file is used to get existing channel lengths for comparison, and to determine channel connectivity.
  2. The currently loaded network file.
  3. A DSM2 output (.hof) file which was created from the network file by running DSM2-Hydro with geometry created using the currently loaded network file with printlevel>=5
  4. A 2m DEM CutFill validity file, which was created based upon a visual inspection of the extent of the coverage of channel polygons with data in the 2m DEM files, using ArcGIS. If coverage is complete or very nearly complete, the validity is true.
  5. CutFill results files, each containing results from the CutFill operations for a given DEM.
  6. (Optional): a list of channel groups. Default is the list of groups for which polygons were created and used in the CutFill operations: \"448_449_572_573, 439_440_441_451_452_453_454, 438_443_444_450_570_571_574_575,290-294,281_282_295_296_297_301\". You can add to this list.

The report contains, for a given stage (usually 0.0 NAVD)

  1. Channel: The name/number of the DSM2 channel. Could also be a group of channels. Examples: Sherman Lake would be identified as: 290-294, Grizzly Bay would be identified as: 448_449_572_573
  2. Comparison of channels.inp length vs CSDP length:
    1. Channels.inp length: length specified for DSM2 in the DSM2 channels file above.
    2. CSDP length: length calculated by the CSDP that will be used to replace the 'Channels.inp length'.
    3. % change: the change in length CSDP vs Channels.inp
  3. CSDP Average width: For determining GIS volume estimate\u00a0validity\u2013average width should be at least 3 times the DEM grid size.
  4. If CSDP Volume is significantly different from DSM2 Volume, that would mean the effects of interpolation should be considered when modifying cross-sections.
    1. CSDP Volume: Channel volume calculated by CSDP for specified elevation assuming no inter-channel interpolation. Not used for comparison, but may be of interest to some.
  5. Not used for comparison, but may be of interest to some.
    1. CSDP Wetted Area: Wetted area calculated by CSDP for specified elevation assuming no inter-channel interpolation. Not used for comparison, but may be of interest to some.
    2. CSDP Surface Area: Surface area calculated by CSDP for specified elevation assuming no inter-channel interpolation.
    3. CSDP Max Area Ratio: The maximum ratio of cross-sectional areas within a channel using CSDP cross-sections. Important for numerical stability. Max area ratios should be \\< 2.0.
  6. If CSDP Volume is significantly different from DSM2 Volume, that would mean the effects of interpolation should be considered when modifying cross-sections.
    1. DSM2 Volume: Channel volume calculated at specified elevation using virtual cross-sections from DSM2 output file. Used for comparison with GIS volumes.
  7. Not used for comparison, but may be of interest to some:
    1. DSM2 Wetted Area: Wetted area calculated at specified elevation using virtual cross-sections from DSM2 output file
    2. DSM2 Surface Area: Surface area calculated at specified elevation using virtual cross-sections from DSM2 output file
    3. DSM2 Max Area Ratio: The maximum ratio of cross-sectional areas within a channel using virtual cross-sections. Important for numerical stability. Max area ratios should be \\< 2.0.
  8. These results include valid and invalid values (see \"2m Validity\" and \"10m Validity\" below), so these are probably not what you want to use.
    1. GIS 2m Max* Volume: The GIS calculated channel volume, converted to ft3, using 2m DEM.
    2. GIS 2m Max* Area: The GIS calculated 2d area, converted to ft2, using 2m DEM.
    3. GIS 10 Max* Volume: The GIS calculated channel volume, converted to ft3, using 10m DEM.
    4. GIS 10m Max* Area: The GIS calculated 2D area, converted to ft2, using 10m DEM.
    5. DSM2-2m Vol: The difference between the DSM2 virtual cross-section volume and the 2m DEM volume.
    6. DSM2-10m Vol: The difference between the DSM2 virtual cross-section volume and the 10m DEM volume.
    7. 2m Vol % diff: The % difference between the DSM2 virtual cross-section volume and the 2m DEM volume.
    8. 10m Vol % diff: The % difference between the DSM2 virtual cross-section volume and the 10m DEM volume.
    9. CSDP Avg Width: The average width of all the CSDP cross-sections in a channel at the specified elevation.
  9. 2m Width Ratio: the CSDP Avg Width / 2m.
  10. 10m Width Ratio: the CSDP Avg Width / 10m.
  11. 2m Validity: 2m DEM volume and area calculations will be considered valid if a 2m DEM covers (or nearly covers) the entire channel polygon, and the 2m Width Ratio >= 3.0.
  12. 10m Validity: 10m DEM volume and area calculations will be considered valid if the 10m Width Ratio >= 3.0. Coverage is assumed to be complete for all channels.
  13. Valid Values: These are the ones you want to use:
    1. Valid 2m Vol: The value of GIS 2m Volume, if 2m Validity==true, null otherwise.
    2. Valid 10m Vol: The value of GIS 10m Volume, if 10m Validity==true, null otherwise.
    3. DSM2-Valid 2m Vol: The value of DSM2-2m Vol if 2m Validity==true, null otherwise.
    4. DSM2-Valid 10m Vol: The value of DSM2-10m Vol if 10m Validity==true, null otherwise.
    5. Valid 2m Vol % diff: The value of 2m Vol % diff if 2m Validity==true, null otherwise.
    6. Valid 10m Vol % diff: The value of 10m Vol % diff if 10m Validity==true, null otherwise.
  14. CSDP highest bottom elevation: The highest bottom elevation of all the cross-sections within the channel. Can help identify cross-sections that are likely to dry up.
  15. CSDP XS with no points: The indices of the cross-sections in the channel that have no points. These cross-sections should be removed or edited.
  16. CSDP XS within 500.0 feet: The indices of the cross-sections in the channel that are within 500.0 feet of each other. This could help identify duplicate cross-sections or unnecessary cross-sections.\u00a0
  17. These can help identify cross-sections that need to be adjusted to improve Max Area Ratio.
    1. CSDP XS with Min area: The index of the cross-section in the channel that has the smallest area at the specified elevation
    2. CSDP XS with Max area: The index of the cross-section in the channel that has the largest area at the specified elevation
  18. CSDP XS with duplicate stations: The indices of the cross-sections in the channel that have duplicate station values. These cross-sections need to be fixed.
  19. We no longer care about negative dConveyance, so these can probably be ignored:
    1. CSDP XS with -dK: The indices of the cross-sections in the channel that have negative dConveyance at any elevation.
    2. CSDP XS with -dK in intertidal zone: the indices of the cross-sections in the channel that have negative dConveyance in the intertidal zone. (intertidal zone is assumed to be limited to the range -2.5 \\< Z \\< 17.5 ft NAVD88)

*When calculating GIS results, some channels overlap more than one DEM.\u00a0 When this happens, only the largest values of Volume and 2D Area are used, because they are assumed to be associated with the DEM that covers a greater portion of the polygon. If the coverage is not complete, the value should be invalidated visually in the \"2m DEM Validity\" file.

Creating the network summary report:

  1. Load a bathymetry file.
  2. Load or create a network file.
  3. Select Network->Reports\u2192Network Summary Report
  4. In the following dialog:\u00a0
  5. Either
    1. click the \"Load Dialog Values\" button to populate the dialog using values read from a file, OR
    2. Populate the dialog one field at a time by clicking the \"Select File\" buttons to specify
      1. the channels.inp file (in the current DSM2 setup, this is channel_std_delta_grid_NAVD-20121214.inp),
      2. optionally a .hof file created by running DSM2 with printlevel=5,
      3. A string representing an array of channels to use for aggregating results (for example, all the channels representing Grizzly Bay). This string can contain lists of channels for which polygons were created for the GIS CutFill operation, or a custom list of channels.
      4. A list of all files containing CutFill results from GIS,
      5. A 2m DEM CutFill Validity file, which I created by visually inspecting the DEM coverage of polygons.
      6. An output path
  6. The results will be written to a tab delimited .txt file specified above. Import the file into Excel, specifying tab as a delimiter.
  7. After the results are written, another window will appear containing graphs of the results.
  8. Save results if desired.

Here is the current network summary report:

There are many rows above the table which define the various quantities. You may want to hide these rows when using the spreadsheet.

"},{"location":"gis/CSDP_Network_Summary_Report/#attachments","title":"Attachments:","text":"

image2019-3-26_14-13-18.png (image/png) image2019-3-25_16-9-52.png (image/png) image2019-3-25_16-9-41.png (image/png) networkSummary.txt (text/plain) image2019-3-25_16-8-30.png (image/png) networkSummary20190308.xlsx (application/vnd.openxmlformats-officedocument.spreadsheetml.sheet) image2019-1-7_14-32-27.png (image/png) networkSummaryWithoutHof.xltx (application/vnd.openxmlformats-officedocument.spreadsheetml.template) networkSummaryWithHof.xltx (application/vnd.openxmlformats-officedocument.spreadsheetml.template) networkSummaryWithoutHof.xlsx (application/vnd.openxmlformats-officedocument.spreadsheetml.sheet) networkSummaryWithHof.xlsx (application/vnd.openxmlformats-officedocument.spreadsheetml.sheet) networkSummaryWithoutHof.txt (text/plain)

"},{"location":"gis/CSDP_Tutorial/","title":"CSDP Tutorial","text":"

Brad Tom (developer of CSDP) gave a presentation on CSDP in February 2009. A recording of this presentation is available as below.

"},{"location":"gis/CSDP_Tutorial/#updates","title":"Updates","text":"

In version 8.x the irregular xsection file format has changed. To change this information to the new format run the script under vista/scripts/dsm2/csdp_geom_converter.py with the location of the directory as input

vscript scripts/dsm2/csdp_geom_converter.py <dir_containing_csdp_calculated_xsections>\n\n\n\nThis will create an irregular_xsections_dsm2.inp which will contain all the cross sections in that directory in the new 8.x format\n
  • CSDP Network File Format
  • CSDP Network Summary Report
  • Exporting Channel Lengths from CSDP Network file
  • Exporting CSDP Information into GIS
  • Extracting Bathymetry Data From An Irregularly Shaped Region
  • Importing Digital Elevation Maps (DEMs) into CSDP
  • Merging multiple versions of network files
"},{"location":"gis/CSDP_Tutorial/#attachments","title":"Attachments:","text":"

csdpWebexClass.pdf (application/pdf) csdpWebexClass.ppt (application/vnd.ms-powerpoint)

"},{"location":"gis/Creating_DSM2_v8.2_GIS_grid_map/","title":"Creating DSM2 v8.2 GIS grid map","text":""},{"location":"gis/Creating_DSM2_v8.2_GIS_grid_map/#creating-shapefiles","title":"Creating shapefiles","text":"

The existing CSDP network file for the DSM2 8.2 network is incomplete and contains errors. The network file for DSM2 v8.3 is accurate, but contains some additional channels and nodes that were moved. The easiest way to create shapefiles for the DSM2 v8.2 grid is to modify the network and landmark (nodes) for the 8.3 grid.

"},{"location":"gis/Creating_DSM2_v8.2_GIS_grid_map/#verification","title":"Verification","text":"

The goal is to display the pdf gridmap as a background image in ArcGIS to verify that all the channel and node numbers are correct and in the correct locations.

  1. The following command (using ghostscript in Cygwin) creates a tif file from the dsm2 pdf grid map file: gs -q -dNOPAUSE -sDEVICE=tiffg4 -sOutputFile=gridmappdf.tif \"DSM2_Grid2.0 (1).pdf\" -c quit
  2. Create a copy of the tif file, with \"marsh\" in the filename. This is because the pdf gridmap has the Suisun Marsh disconnected from the delta and printed in a different scale.
  3. Next step is to identify a few landmarks that are easily identifiable on both the pdf grid map and on the basemap in ArcGIS. I chose 3 points: one in the north delta, near the confluence, and in the south delta.\u00a0
  4. In QGIS, select Plugins-Manage and Install Plugins:
  5. Search for \"GDAL\", check the box \"Georeferencer GDAL\", then click close:\u00a0
  6. Select Raster-Georeferencer:\u00a0
  7. Click the Open Raster button\u00a0'
  8. Select the pdf file.
  9. Select Settings-Transformation Settings
  10. Use the following settings, including an output filename:\u00a0
  11. Click on a point in the map, and enter UTM coordinates, then click OK:\u00a0
  12. When you have specified coordinates for all your points, click the start georeferencing button. A tif file will be created, which you can load into ArcGIS.
  13. In ArcGIS, adjust the layer transparency.\u00a0
  14. Now you can easily compare the pdf gridmap to the GIS data.
"},{"location":"gis/Creating_DSM2_v8.2_GIS_grid_map/#attachments","title":"Attachments:","text":"

image2020-5-12_9-25-2.png (image/png) image2020-5-12_7-18-51.png (image/png) image2020-5-12_7-17-47.png (image/png) image2020-5-12_7-17-15.png (image/png) image2020-5-12_7-16-53.png (image/png) image2020-5-12_7-15-45.png (image/png) image2020-5-12_7-14-51.png (image/png) image2020-5-12_7-14-8.png (image/png) image2020-5-12_7-12-43.png (image/png)

"},{"location":"gis/Cross-Section_Development_Program_CSDP_/","title":"Cross-Section Development Program (CSDP)","text":""},{"location":"gis/Cross-Section_Development_Program_CSDP_/#introduction","title":"Introduction","text":"

Bathymetry data is used by CSDP to draw cross-sections which are then converted to DSM2-Hydro cross sectional input. Furthermore CSDP provides the channel and cross section locations in GIS projection of NAD 27, NGVD 29

CSDP was developed by Brad Tom in the 1990s, and has recently been updated for use in the DSM2 GIS Reference Project.

"},{"location":"gis/Cross-Section_Development_Program_CSDP_/#how-to-get-started-in-using-csdp","title":"How to get started in using CSDP?","text":"

The CSDP Manual is available here\u00a0Cross-Section Development Program (CSDP)

A hands on tutorial and presentation given by Brad Tom in 2009 is a good reference resource.

In version 8.x the irregular xsection file format has changed. To change this information to the new format run the script under vista/scripts/dsm2/csdp_geom_converter.py with the location of the directory as input

vscript scripts/dsm2/csdp_geom_converter.py <dir_containing_csdp_calculated_xsections>\n

This will create an irregular_xsections_dsm2.inp which will contain all the cross sections in that directory in the new 8.x format

CSDP will now create DSM2 geometry input in both the original multi-file format used by older versions of DSM2, and the newer single file format, so the above script is no longer needed.

An ArcGIS extension was developed as a modern replacement for CSDP by Tom Heinzer. However this has not been available publicly yet and the grid and cross sections are still being developed in this tool.

"},{"location":"gis/Cross-Section_Development_Program_CSDP_/#csdp-conversion-to-arcgis","title":"CSDP conversion to ArcGIS","text":"

Using WKT (Well Known Text) format and QGIS (add delimited text layer) capabilities, the information from CSDP files was converted to shapefiles

node.cdl contained the information about the nodes in CSDP corresponding to DSM2 node locations.\u00a0

mjtstrm_vec.cdo contained the outlines of levees and other features which are now redundant given the availability of maps (raster based tile layers) from google, open street etc.

05jul2000.cdn is the channel network which included the centerline of channels and the cross-section created by looking at bathymetry data (those are available separately as large files)

delta_2009Calib.cdn is the channel network for presumably the 2009 calibration.

The files are available on the shared drive (\\cnrastore-bdo\\Delta_Mod\\Share\\maps) from both 2000 (CSDP_Converted_2000Calib.qgs) and 2009 (CSDP_Converted_2009Calib.qgs) calibrations

For more information on DSM2 gridmaps and how they relate to CSDP files, see\u00a0DSM2 Geo referenced grid.

  • Write up needed on CSDP and its successor the ArcGIS extension\u00a0
  • Write up needed using approach by Ines using ArcGIS and python scripts\u00a0
"},{"location":"gis/Cross-Section_Development_Program_CSDP_/#attachments","title":"Attachments:","text":"

CSDP_vs_Channels_inp_Lengths.xlsx (application/vnd.openxmlformats-officedocument.spreadsheetml.sheet) Clifton_court_2011.png (image/png) Clifton_court_1990.png (image/png) RSAC092_2011.png (image/png) RSAC092_1990.png (image/png) RSAN018_2011.png (image/png) RSAN018_1990.png (image/png) RSAN007_2011.png (image/png) RSAN007_1990.png (image/png) csdpmanual.pdf (application/pdf)

"},{"location":"gis/DSM2_Geo_referenced_grid/","title":"DSM2 Geo referenced grid","text":""},{"location":"gis/DSM2_Geo_referenced_grid/#background","title":"Background","text":"

DSM2 input specifies geographically based information such as channel lengths and cross section distances from the upstream node. However the geographically referenced node locations or channel outlines are not directly needed for hydrodynamic calculations.\u00a0

In addition to this the cross sections in DSM2 are based on bathymetry data that is used to generate elevation to cross-sectional property information.\u00a0

Even though this information is not needed directly it is very important to keep the geographically referenced information in sync with the input parameters such as channel length and cross section locations in DSM2 input

There have been different areas for which the grid was developed over time, the Sacramento-San Joaquin Delta, the San Joaquin River Extension and the Aqueduct grid. The one that is most commonly referred to as DSM2 grid is the Sacramento-San Joaquin Delta

"},{"location":"gis/DSM2_Geo_referenced_grid/#sacramento-san-joaquin-delta-grid","title":"Sacramento - San Joaquin Delta grid","text":"

The original DSM2 grid was based on hand calculated distances based on USGS Quad maps of the Delta (circa 1990). These were done on paper maps and the original information has been lost though.

A pdf version of this grid based on presumably this information is often found in circulation. The grid map contained in this pdf was originally created using AutoCAD. However the node locations in this pdf version are clearly not in the stream at many times and certainly not geo-referenced to any projection system. Nodes and channels were not always placed very carefully, presumably because the map was primarily used to identify approximate locations of nodes, channels, and stations, and channel connectivity.

In the late 1990s or early 2000s, a paper copy of the grid was placed on a digitizing tablet, and nodes were digitized by clicking on each one (Amy Little might have done this). The result was a file containing approximate UTM coordinates of each node. This file was used by the DSM2 Cross-Section Development Program (CSDP) to create representations of DSM2 channels and cross-sections.\u00a0

CSDP was developed by Brad Tom and Nicky Sandhu based upon specifications written by Ralph Finch in 1998 to derive the cross sections from bathymetry data, which at the time were mostly single beam soundings of depth that were available over many decades in the Delta. This tool is the basis of the current grid in 2000 and the recalibrated grid in 2009. As a by product of this effort the node locations and channel outlines were stored in UTM coordinates. Originally, these were not used directly to derive channel lengths, but they were used indirectly in determining the distance of a cross-section from the upstream node of a channel. The DSM2 GIS Reference project, which began in 2018, will use CSDP centerlines to determine channel lengths.

CSDP was developed pre ArcGIS and, with ArcGIS now being fairly standard in DWR, there is a need to provide this geographical information in ArcGIS format. In recent years, Jane Schafer-Kramer created an\u00a0ArcGIS version of this grid.\u00a0Jane developed, under Ralph Finch's guidance, an ArcGIS referenced grid by manually putting nodes at the closest location based on the pdf version of the grid. Again the channel lengths from these would not match either CSDP or the original grid as it is an independent manual effort. Furthermore there would be a mismatch to the location of the cross-section positions.\u00a0

In 2012, Tom Heinzer was contracted to develop an ArcGIS based extension to allow a user to develop cross-sections from DEM which in turn is based on interpolations of depth sounding data. This again is a work in progress and cannot import the current cross-sectional profiles available in CSDP.

In 2017, CSDP grid data for the 2009 calibration was imported into ArcGIS and along with it the channel outlines and node locations. The channel outlines in ArcGIS were used to calculate lengths for the channels and these were then compared to the current grid.\u00a0 There were many mismatches discovered and these should be addressed in future efforts

"},{"location":"gis/DSM2_Geo_referenced_grid/#2009-grid","title":"2009 Grid","text":"

The 2009 Grid is used for DSM2 v8.2. It is similar to the pdf gridmap, but it includes some upper sacramento river changes.\u00a0

The node locations and the associated channel network lengths do have a match with the 2000 calibration files (spot checked). However, the 2009 CH2MHill\u00a0mini calibration adjusted node positions, channel lengths, and cross-sections for channels 412-418.\u00a0 The changes made in these channels were incorporated into DSM2, and are included in DSM2 versions as recent as v8.2.0, which is the current release as of 10/2019. However, we did not get any CSDP or GIS data from CH2MHill.\u00a0 Node locations were reverse engineered using the mini calibration lengths, starting with the common node position from channel 412. The overall sum of the length (reach 412-418) was unchanged and this assumption allows for a reasonable reverse engineering effort.\u00a0

This reverse engineered effort is available on the shared drive as shapefiles\u00a0\\cnrastore-bdo\\Delta_Mod\\Share\\maps\\csdp_2009_calib_converted\\CSDP_Channels_Adjusted_MiniCalib.shp (channels) and\u00a0\\cnrastore-bdo\\Delta_Mod\\Share\\maps\\csdp_2009_calib_converted\\CSDP_Nodes_Adjusted_MiniCalib.shp.\u00a0The Nodes shapefile is missing some files, and cannot be loaded into ArcGIS. It was loaded into OpenJUMP, and exported to\u00a0\\cnrastore-bdo\\Delta_Mod\\Share\\maps\\csdp_2009_calib_converted\\CSDP_Nodes_Adjusted_MiniCalib_Recovered.shp. These files are also available in\u00a0\\nasbdo\\Modeling_Data\\DSM2_GIS_CSDP_gridmaps\\GISGridmapV8.2.

These layers are the closest approximation to the grid used for DSM2 v8.2.\u00a0

There is a large discrepancy in the channel length for channel 422 between cross channel and northern head of Georgiana slough. CSDP and ArcGIS calculations put it at 3300 feet while in DSM2 input files it is 5300 feet. This is not an isolated incident; there are many others as documented in this CSDP_vs_Channels_inp_Lengths.xlsx

"},{"location":"gis/DSM2_Geo_referenced_grid/#dsm2-v81-and-v82-grid","title":"DSM2 v8.1 and v8.2 grid","text":"

For version 8.1 and 8.2, use this grid for referencing DSM2 elements approximately. The channels and nodes layers are incomplete, not very accurate, and contain errors.

Shapefiles are available in \\nasbdo\\Modeling_Data\\DSM2_GIS_CSDP_gridmaps\\GISGridmapV8.2\\

"},{"location":"gis/DSM2_Geo_referenced_grid/#dsm2-v83-grid","title":"DSM2 v8.3 grid","text":"

The 2019 grid is used for DSM2 v8.3, which is under development, and will be the result of the DSM2 GIS Reference Project.

Three shapefiles\u00a0(located here: \\nasbdo\\Modeling_Data\\DSM2_GIS_CSDP_gridmaps\\GISGridmapV8.3) each have been created from the CSDP network (channel centerlines) and landmark (nodes) data for both the 2009 calibration (DSM2 V8.2) and the 2019 calibration (DSM2 V8.3). The shapefiles were created by exporting network and landmark data from the CSDP to WKT files and importing the results into QGIS, then saving to shapefiles. This is intended to be a first step toward creating a georeferenced grid map.\u00a0Shapefiles are available in \\nasbdo\\Modeling_Data\\DSM2_GIS_CSDP_gridmaps\\GISGridmapV8.3\\

  1. dsm2_channels_centerlines contains the channel centerlines as created in the CSDP. Many channels have endpoints that are not located at the node; this was done to improve the accuracy of the DSM2 channel volume.\u00a0Also, many centerlines do not follow the actual channel centerline perfectly.
  2. dsm2_channels_straightlines contains straight lines connecting the two endpoints of each CSDP centerline.
  3. dsm2_nodes contains the CSDP landmark data. The node locations were previously not very accurate; they have now been corrected.
  4. dsm2_boundary_flow_nodes contains points placed at the locations of nodes where boundary flows are applied.
  5. dsm2_boundary_stage_node contains a point placed at the location of the node where the boundary stage is applied
  6. dsm2_gates contains points placed at the approximate location of the channel centerline near each gate. In DSM2, gates are located at the ends of channels. The points in this layer are intended to represent the approximate location in DSM2, and not necessarily the physical location of the gate.
"},{"location":"gis/DSM2_Geo_referenced_grid/#future-directions","title":"Future Directions","text":"

We need a georeferenced gridmap. It should have the following features:

  1. Display nodes as circles with numbers inside.
  2. Display straightline channels with numbers, and an arrow indicating positive flow direction.
  3. Display channels derived from CSDP centerlines, with numbers, and an arrow indicating positive flow direction.\u00a0
  4. straightline channels and CSDP centerline channels should be different colors.
  5. Useful for printing on a plotter.
  6. Easy to modify when CSDP node locations or channels change.\u00a0
  7. Good contrast with background, so we can easily determine connectivity and read all the numbers.\u00a0

  8. Michael Mehrdadi is working on an ArcGIS gridmap using the shapefiles for the 2019 grid.\u00a0

  9. Hans Kim\u00a0is working on a Google Earth gridmap. This will likely be a useful training tool, and may have other uses.\u00a0
    • The current version (as of 10/31/2019) of the gridmap is found here: DSM2_Grid_191029.kml. It can be opened with Google Earth or imported into Google Map.
    • Updates will be made as new shapefiles become available.
"},{"location":"gis/DSM2_Geo_referenced_grid/#attachments","title":"Attachments:","text":"

DSM2_Grid_191029.kml (application/octet-stream)

"},{"location":"gis/DSM2_Sacramento_San-Joaquin_Delta_Grid/","title":"DSM2 Sacramento San-Joaquin Delta Grid","text":""},{"location":"gis/DSM2_Sacramento_San-Joaquin_Delta_Grid/#introduction","title":"Introduction","text":"

The main area of application for DSM2 is the Sacramento San-Joaquin Delta. This grid was developed over the years. A commonly used version is available as a PDF. Vamsi Sridharan made a stitched version with Suisun bay from the same PDF available here.

"},{"location":"gis/DSM2_Sacramento_San-Joaquin_Delta_Grid/#arcgis-version","title":"ArcGIS version","text":"

In recent years, Jane Schafer-Kramer created an ArcGIS version of this grid. This map representation will be refined and available with the ArcGIS X-section editing tool that is now in beta testing.

To view channels colored by mannings or dispersion, channels.inp was imported (from DSM2 v 8.1.2) as a table. This was then joined with \"DSM2 Channels\" table in ArcGIS on the channel number field. Then symbology can be used to display mannings attribute in the joined table. The product is available here\u00a0\\nasbdo\\Delta_Mod\\Share\\maps\\Delta Stations with DSM2 Grid Mannings N Colored.mpk

"},{"location":"gis/DSM2_Sacramento_San-Joaquin_Delta_Grid/#attachments","title":"Attachments:","text":"

DSM2_Grid2.0_updated.pdf (application/pdf) Delta_Stations_with_DSM2_Grid.mpk (application/octet-stream) DSM2_Grid2.0.pdf (application/pdf)

"},{"location":"gis/Exporting_CSDP_Information_into_GIS/","title":"Exporting CSDP Information into GIS","text":""},{"location":"gis/Exporting_CSDP_Information_into_GIS/#background","title":"Background","text":"

CSDP contains channel outline, cross-section locations, and cross-section profile as well as node locations. These are referenced in NAVD88 vertical datum and NAD83 horizontal datum in UTM Zone 10N projection.

"},{"location":"gis/Exporting_CSDP_Information_into_GIS/#methods","title":"Methods","text":"

QGIS is a tool that can import WKT (https://en.wikipedia.org/wiki/Well-known_text) format into a text based layer that can then be exported to ArcGIS.\u00a0

There is Java code available for\u00a0

  1. Exporting CSDP channel outlines to WKT.\u00a0https://github.com/CADWRDeltaModeling/dsm2-vista/blob/master/dsm2-input-model/src/gov/ca/dsm2/input/csdp/CSDPChannelNetworkToWKT.java
  2. Exporting CSDP node locations to WKT.\u00a0https://github.com/CADWRDeltaModeling/dsm2-vista/blob/master/dsm2-input-model/src/gov/ca/dsm2/input/csdp/CSDPNodeCDLToWKT.java

  3. Nicky SandhuNeed to make these standalone generic utilities to be run from command line

  4. Brad Tomcan you take a look at this code and see if it can be integrated into CSDP easily
"},{"location":"gis/Exporting_Channel_Lengths_from_CSDP_Network_file/","title":"Exporting Channel Lengths from CSDP Network file","text":"

CSDP currently has the ability to output just the channel ids and lengths.

  1. Use the Network | Export Options menu item to select only channel lengths output in station elevation format.\u00a0
  2. Then use Network | Save As menu to save to file which should only then have channel id and length in output.
"},{"location":"gis/Extracting_Bathymetry_Data_From_An_Irregularly_Shaped_Region/","title":"Extracting Bathymetry Data From An Irregularly Shaped Region","text":"

Using the CSDP, create a new centerline using the Centerline-Create menu item.

The name of the centerline does not matter.

Add points to the centerline until it outlines the data you want to extract. See example below. The endpoints do not need to be in the same place. A polygon will be created whose vertices are all of the centerline points, so the first and last points will be connected. Save the network file.

I used a simple Java program called ExtractShipChannelLeveesFromYoloBypassDEM, which uses hard-coded filenames for both the input (network file and the bathymetry file) and the output (bathymetry file). Eventually this code will be added to the Bathymetry menu in the CSDP, which will export the data surrounded by a polygon created from the selected centerline to a specified filename.

"},{"location":"gis/Extracting_Bathymetry_Data_From_An_Irregularly_Shaped_Region/#attachments","title":"Attachments:","text":"

image2018-12-3_13-53-22.png (image/png) image2018-12-3_13-53-12.png (image/png) image2018-12-3_13-52-46.png (image/png)

"},{"location":"gis/Importing_Digital_Elevation_Maps_DEMs_into_CSDP/","title":"Importing Digital Elevation Maps (DEMs) into CSDP","text":""},{"location":"gis/Importing_Digital_Elevation_Maps_DEMs_into_CSDP/#background","title":"Background","text":"

CSDP was developed in the late 1990s and can only consume point features in its custom format. This document outlines the process of converting modern DEMs in raster form into files that CSDP can use to bring in the latest bathymetry information that is developed in modern tools such as ArcGIS

"},{"location":"gis/Importing_Digital_Elevation_Maps_DEMs_into_CSDP/#csdp-file-format","title":"CSDP File Format","text":"

CSDP supports bathymetry data as point features in XYZ format along with columns for year and source of data. In addition CSDP allows for a metadata defining the projection system (it only supports two UTM NAD83 and NAD27)

Below is a sample header from a CSDP bathymetry file

;HorizontalDatum:  UTMNAD83\n;HorizontalZone:   10\n;HorizontalUnits:  Meters\n;VerticalDatum:    NAVD88\n;VerticalUnits:    USSurveyFeet\n;Filetype: bathmetry\n;NumElements: 1544252\n563970.000000000 4234180.000000000 112.7323 2012 SF_DEM\n563990.000000000 4234180.000000000 117.6413 2012 SF_DEM\n
"},{"location":"gis/Importing_Digital_Elevation_Maps_DEMs_into_CSDP/#steps","title":"Steps","text":"
  1. Use Arc Toolbox > Conversion Tools > From Raster > Raster to ASCII to output DEM as text file.\u00a0

    For large DEMs, click on the Environments in the dialog box in step 1 and make sure the \"Output Coordinates\" are in NAD83, zone 10, in meters in UTM projection, and the vertical datum should be NAVD88 in meters. and that the \"Processing Extent\" is \"Same as Display\". Zoom in to the relevant portion before running the tool in step 1 and that should limit the DEM output to just the viewable area.

  2. Use this program:\u00a0ASCIIGridToCSDPConverter \\<raster ascii filename> \\<prn output filename>

  3. Open \\<prn output filename> in CSDP

  4. You can also use the CSDP:

    Select Bathymetry-Import Bathymetry from ASCII Raster

    Fill in the dialog. If dataset is more dense than you need, you can enter a factor greater than 1

"},{"location":"gis/Importing_Digital_Elevation_Maps_DEMs_into_CSDP/#attachments","title":"Attachments:","text":"

image2019-6-14_11-20-44.png (image/png) image2019-6-14_11-20-28.png (image/png)

"},{"location":"gis/Merging_multiple_versions_of_network_files/","title":"Merging multiple versions of network files","text":"

One way to merge changes from multiple users is to use a file comparison tool such as WinMerge. However, if the users did not start with identical versions of the network file, this won't work.

I have added a feature to the CSDP (Network-Save Specified Channels), which helps merge changes from network files submitted by multiple users who did not start with the same version of the network file.

Here's how I use it to merge changes into an existing network file:

  1. Get a list of modified centerlines for the new network file.
  2. Enter the list into Excel.\u00a0
  3. Copy the list, and paste-special-transpose.
  4. In the CSDP, load the existing network file, which you might refer to as the current master version.\u00a0
    1. Select Network-Save Specified Channels.
    2. In the file selector dialog, enter a filename for the new master version.\u00a0
    3. Go back to Excel, and copy the transposed list of centerline numbers. Past them into the dialog that appears (below) in the Channel Numbers field. (The list will be tab delimited, which is fine). You may not be able to see all the centerline names in the text field (I'll have to work on that), but it will work.
    4. Before clicking OK, click the \"Don't export specified channels\" checkbox. Make sure this option is selected before you click OK. When you click OK, CSDP will create a new network file containing all centerlines EXCEPT the ones you specified.
  5. Now load the new network file.
    1. Select Network-Save Specified Channels.
    2. In the file selector dialog, enter a filename for a temporary network file.
    3. Go back to Excel, and copy the transposed list of centerline numbers. Paste them into the dialog that appears (below) in the Channel Numbers field. (The list will be tab delimited, which is fine). You may not be able to see all the centerline names in the text field (I'll have to work on that), but it will work.
    4. Before clicking OK, make sure the \"Don't export specified channels\" checkbox is NOT selected.\u00a0When you click OK, CSDP will create a new network file containing only the centerlines you specified.\u00a0
  6. Use a text editor to copy the contents (excluding the headers at the top) of the new temporary network file into the new master network file. Update the numElements field in the header of the new file. The value should be the sum of the values from the two files.
"},{"location":"gis/Merging_multiple_versions_of_network_files/#attachments","title":"Attachments:","text":"

image2018-12-21_12-8-47.png (image/png) image2018-12-21_12-0-36.png (image/png) image2018-12-21_11-49-11.png (image/png)

"},{"location":"gis/Schematics_and_Boundaries/","title":"Schematics and Boundaries","text":"

CalSIM II\u00a0schematic is retrieved from BDO Central Valley modeling website

http://baydeltaoffice.water.ca.gov/modeling/hydrology/CalSim/Downloads/CalSimDownloads/BST_CALSIMII_schematic_040110.pdf

DSM2 (v812) schematic is retrieved from DSM2 release package dsm2\\documentation

DSM2 inputs are retrieved from CalSIM output

DSM2 name DSM2\u00a0node CalSIM II CalSIM 3

BOUNDARY_FLOW

calaveras

FLOW-CHANNEL

21 C508_R514 C_CLV004 cosumnes 446 C501 C_CSM005 yolo 316 C157 C_CSL005 sac FLOW 330 C169_D168B_D168C C_SAC041 vernalis 17 C639_R644 C_SJR070 moke

FLOW-INFLOW

447 I504

C_MOK022

SR_60N_MOK019

SR_26S_MOK014

SOURCE_FLOW

AntiochWW

FLOW-DELIVERY

\u00a046 D406B COSMA1 33 D514A \u00a0D_SJR028_WTPDWS COSMA2 33 D514B FRWP_EBMUD 332 D168B FRWP_SCWA 332 D168C northbay 273 D403B \u00a0C_CSL004B nb_fvb 273 D403D nb_sol 273 D403C ccc 206 D408_RS D408 CCWDVC 191 D408_VC oldr034 80 D408_OR vallejo 320 D403A cvp \u00a0181 D418_TD_ADJ \u00a0D_OMR028_DMC000 SOURCE_FLOW_RESERVOIR swp FLOW-DELIVERY clifton_court D419_TD_ADJ \u00a0D_OMR027_CAA000 NODE_CONCENTRATION vernalis SALINITY-EC 17 VERNWQFINAL VERNWQFINAL"},{"location":"gis/Schematics_and_Boundaries/#attachments","title":"Attachments:","text":"

DSM2_Grid2.0.pdf (application/pdf) BST_CALSIMII_schematic_040110.pdf (application/pdf)

"},{"location":"gis/Symbology/","title":"Symbology","text":"

The following symbology is needed:

  1. Arrows on or next to straight line channels and centerline channels.
  2. Nodes displayed as circles with numbers in the middle.
    1. In ArcGIS Pro, change the symbol to a circle (double click on the symbol in the contents pane) and set the circle size to 20 pt, and the font to Tahoma 8. Select Enable scale-based sizing.
  3. Gates displayed using symbol similar to that used in the pdf grid map. Using Meteorology-Fog, Light.
"},{"location":"gis/Symbology/#channel-arrows","title":"Channel Arrows","text":"

The direction of arrows created using symbology is determined by the order in which the points are defined in each line. Since the channel lines (both straight and centerlines) are derived from CSDP data, the points should always be in order from upstream to downstream.

To modify the symbol:

  1. double-click on the symbol below the layer name (see image below).
  2. In the Symbology Panel, under Gallery, select \"Arrow Right Middle\".\u00a0
  3. Then click on Properties, set the color, and set line width to 15 pt (or whatever you want), and select \"Enable scale-based sizing\".
"},{"location":"gis/Symbology/#attachments","title":"Attachments:","text":"

image2020-5-14_15-15-14.png (image/png) image2020-5-14_15-14-8.png (image/png)

"},{"location":"manual/","title":"Contents","text":"
  • Overview
  • Getting Started\u00a0
    • Download and installation
    • Recommended third party extras
    • Test Launching DSM2
  • Layers, Priority, Data Management
  • Operating Rules
  • Reference
"},{"location":"manual/reference/","title":"Input Tables","text":"
  • Reference
    • Boundary Flow
    • Boundary Stage
    • Channels
    • Channel Initial Condition
    • Environment substitution (ENVVAR)
    • Gate
    • Group
    • Input Climate
    • Input Gate
    • Input Transfer Flow
    • IO Files Section (non-DSS)
    • Node Concentration
    • Operating Rule
    • Output: Channel (including source tracking)
    • Output: Gate
    • Output: Reservoir (including source tracking)
    • Particle Flux Output
    • Particle Group Output
    • Particle Insertion
    • Particle Filter
    • Particle Reservoir Filter
    • Rate Coefficient
    • Reservoir
    • Reservoir Concentration
    • Reservoir Initial Condition
    • Scalar
    • Source Flow (HYDRO at nodes)
    • Source Tracking (see Output: Reservoir and Channel)
    • Tidefile
    • Transfer
"},{"location":"manual/reference/Boundary_Flow/","title":"Boundary Flow","text":""},{"location":"manual/reference/Boundary_Flow/#overview","title":"Overview:","text":"

Boundary Flows are boundary conditions at nodes where flow is known. This view defines these time series and assigns time series data to them.

"},{"location":"manual/reference/Boundary_Flow/#tables","title":"Tables:","text":"

Example

# Description:\n# Historical boundary flows to Delta\nBOUNDARY_FLOW\nNAME      NODE SIGN FILLIN   FILE          PATH                                                     \ncalaveras   21    1   last   ${BNDRYINPUT} /FILL+CHAN/RCAL009/FLOW//1DAY/${HISTFLOWVERSION}/         \ncosumnes   446    1   last   ${BNDRYINPUT} /FILL+CHAN/RCSM075/FLOW//1DAY/${HISTFLOWVERSION}/         \nmoke       447    1   last   ${BNDRYINPUT} /FILL+CHAN/RMKL070/FLOW//1DAY/${HISTFLOWVERSION}/         \nnorth_bay  273   -1   last   ${BNDRYINPUT} /FILL+CHAN/SLBAR002/FLOW-EXPORT//1DAY/${HISTFLOWVERSION}/ \nsac        330    1   last   ${BNDRYINPUT} /FILL+CHAN/RSAC155/FLOW//1DAY/${HISTFLOWVERSION}/         \nvernalis    17    1   last   ${BNDRYINPUT} /FILL+CHAN/RSAN112/FLOW//1DAY/${HISTFLOWVERSION}/         \nyolo       316    1   last   ${BNDRYINPUT} /FILL+CHAN/BYOLO040/FLOW//1DAY/${HISTFLOWVERSION}/        \nEND\n
"},{"location":"manual/reference/Boundary_Flow/#boundary_flow","title":"BOUNDARY_FLOW","text":"

The Boundary Flow table defines flow boundary conditions by giving them names and associating them with a node. The table also assigns a time series to the boundary condition. Boundary Flow is a top-level layered table.

"},{"location":"manual/reference/Boundary_Flow/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Boundary_Flow/#name","title":"NAME","text":"

Name assigned to the source. This is the identifier of the boundary and is referred to elsewhere in the input system. If you assign water quality you will use the same name in order to match concentration to flow.

"},{"location":"manual/reference/Boundary_Flow/#node","title":"NODE","text":"

Node number at which the source is applied.

"},{"location":"manual/reference/Boundary_Flow/#sign","title":"SIGN","text":"

Forces the time series to be a source or a sink. Positive values are normally associated with a source, but the data (especially sinks such as agricultural diversions) are sometimes measured in absolute flow. Use 1 to force the value to be a positive source or -1 to interpret values as a sink.

"},{"location":"manual/reference/Boundary_Flow/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model. See\u00a0fillin types

"},{"location":"manual/reference/Boundary_Flow/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word\u00a0constant\u00a0if you would like to assign a constant value to the input (the value will be entered in the next column).

"},{"location":"manual/reference/Boundary_Flow/#path","title":"PATH","text":"

The path within the text or DSS file of the time series data. If you used the\u00a0constant\u00a0keyword in the Input File column, enter the value (e.g.\u00a04.22) here.

"},{"location":"manual/reference/Boundary_Flow/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Boundary_Flow/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Boundary_Flow/#include-block","title":"Include Block:","text":"

HYDRO_TIME_SERIES

  • Multiple sources and sinks can be assigned to a node. They are usually kept separate in order to assign different concentrations to them.
  • HYDRO is able to accept sources and sinks at boundary nodes, but this is not good modeling practice. Use them on the interior.
"},{"location":"manual/reference/Boundary_Stage/","title":"Boundary Stage","text":""},{"location":"manual/reference/Boundary_Stage/#overview","title":"Overview","text":"

Stage Boundaries are locations where water levels are known. They are often used to represent the tidal boundary of an estuary. This view defines the tidal boundary and assigns a time series to water levels at that boundary.

"},{"location":"manual/reference/Boundary_Stage/#tables","title":"Tables","text":"

Example

# Description:\n# Historical stage at Martinez\nBOUNDARY_STAGE\nNAME  NODE  FILLIN  FILE           PATH                                                \nmtz   361   linear  ${BNDRYINPUT}  /FILL+CHAN/RSAC054/STAGE//15MIN/${HISTSTAGEVERSION}_NAVD/ \nEND\n
"},{"location":"manual/reference/Boundary_Stage/#stage-boundary-table","title":"Stage Boundary Table","text":"

The Stage Boundary table defines the stage boundary by giving it a name and associating it with a node. The table also assigns a time series to the boundary. Stage Boundary is a top-level layered table.

"},{"location":"manual/reference/Boundary_Stage/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Boundary_Stage/#name","title":"NAME","text":"

Name assigned to the source. This is the identifier of the boundary and is referred to elsewhere in the input system. If you assign water quality you will use the same name in order to match concentration to flow.

"},{"location":"manual/reference/Boundary_Stage/#node","title":"NODE","text":"

Node number at which the source is applied.

"},{"location":"manual/reference/Boundary_Stage/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model. See\u00a0fillin types

"},{"location":"manual/reference/Boundary_Stage/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word\u00a0constant\u00a0if you would like to assign a constant value to the input (the value will be entered in the next column).

"},{"location":"manual/reference/Boundary_Stage/#path","title":"PATH","text":"

The path within the text or DSS file of the time series data. If you used the\u00a0constant\u00a0keyword in the Input File column, enter the value (e.g.\u00a04.22) here.

"},{"location":"manual/reference/Boundary_Stage/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Boundary_Stage/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Boundary_Stage/#include-block","title":"Include Block:","text":"

HYDRO_TIME_SERIES

Only one boundary (flow or stage) should be assigned at a node. HYDRO is able to accept sources and sinks at boundary nodes, but this is not good modeling practice.

"},{"location":"manual/reference/Channel_Initial_Condition/","title":"Channel Initial Condition","text":""},{"location":"manual/reference/Channel_Initial_Condition/#overview","title":"Overview:","text":"

HYDRO requires water surface and flow initial condition. This view allows the user to specify default initial conditions. The default initial condition is required. The default will be overridden if a restart file is used.

"},{"location":"manual/reference/Channel_Initial_Condition/#tables","title":"Tables:","text":"
    • Channel Initial Conditions
"},{"location":"manual/reference/Channel_Initial_Condition/#channel_ic","title":"CHANNEL_IC","text":"

The table pairs channel locations with default initial values. Interpolation is used between locations. Water surface (stage) and flow must be specified at the upstream and downstream ends of the channel.

"},{"location":"manual/reference/Channel_Initial_Condition/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Channel_Initial_Condition/#chan_no","title":"CHAN_NO","text":"

Channel number of channel where initial condition is to be applied.

"},{"location":"manual/reference/Channel_Initial_Condition/#distance","title":"DISTANCE","text":"

Distance along channel where initial condition is to be applied. This may be a numerical distance or the keyword \"length\" to indicate the end of the channel. If you edit an entry that says \"length\", you may see a complicated coded value, which is only for internal use.

"},{"location":"manual/reference/Channel_Initial_Condition/#stage","title":"STAGE","text":"

Initial water surface elevation.

"},{"location":"manual/reference/Channel_Initial_Condition/#flow","title":"FLOW","text":"

Initial flow (cfs).

"},{"location":"manual/reference/Channel_Initial_Condition/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Channel_Initial_Condition/#identifier","title":"Identifier:","text":"

CHAN_NO, DISTANCE

"},{"location":"manual/reference/Channel_Initial_Condition/#parent-table","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/Channel_Initial_Condition/#include-block","title":"Include Block:","text":"

INITIAL_CONDITION

  • Default initial values specified in the GUI are replaced if a restart file is used.
  • Reservoir initial surfaces should be matched to the surrounding channels. Differences imply a flow, and if you haven't accounted for the flow in your other initial conditions you will have a flow imbalance or even instability on the first step.
"},{"location":"manual/reference/Channels/","title":"Channels","text":""},{"location":"manual/reference/Channels/#overview","title":"Overview:","text":"

Channels are the fundamental objects of the DSM2 grid. The Channels table allows you to enter channel connectivity, parameters and geometry. Channel connectivity is defined by the upstream and downstream node numbers of the channels. Two child tables describe the locations and geometry of user-described cross-sections in the selected channel. Note that a default initial condition is required for every channel number in the DSM2 grid, and this is entered separately in the Channel Initial Conditions table.

"},{"location":"manual/reference/Channels/#tables","title":"Tables:","text":"
  • CHANNEL
  • XSECT
  • XSECT_LAYER
"},{"location":"manual/reference/Channels/#channel","title":"CHANNEL","text":"

The CHANNEL table defines the connectivity, length, friction and dispersion characteristics of a channel.

"},{"location":"manual/reference/Channels/#field-descriptions","title":"Field Descriptions","text":"

CHAN_NOChannel number. This is the identifier of the channel, and corresponds to the number you typically see on a grid map.LENGTH (ft)Length of the channel reachMANNINGManning's n friction coefficient for the whole reach.DISPERSIONDimensional dispersion factor.UPNODENumber of the upstream node at which channel is connected.DOWNNODENumber of the downstream node at which channel is connected.

"},{"location":"manual/reference/Channels/#table-info","title":"Table Info","text":"

Identifier:CHAN_NOParent Table:Table is parentInclude Block:GRID

"},{"location":"manual/reference/Channels/#xsect","title":"XSECT","text":"

This table lists files where bathymetric cross-sections are specified by the user using the CSDP format. The table lists the fraction of the distance along the reach (from upstream to downstream) at which the user cross-section is located. These cross-sections will be interpolated by the model at computational points. Overspecification of geometry is a frequent source of user error/misconception, please see\u00a0usage notes\u00a0below. Also note that this style of input and the XSECT_LAYER \"single file\" format below should not be freely mixed for a given channel -- use one or the other.

"},{"location":"manual/reference/Channels/#field-descriptions_1","title":"Field Descriptions","text":"

CHAN_NOChannel number where cross-section is locatedDISTFraction of distance from upstream node to downstream node where cross-section is locatedFILECSDP-formatted file where cross-section geometry is defined.

"},{"location":"manual/reference/Channels/#table-info_1","title":"Table Info","text":"

Identifier:CHAN_NO, DISTParent Table:CHANNELParent Identifier:CHAN_NOInclude Block:GRID

"},{"location":"manual/reference/Channels/#xsect_layer","title":"XSECT_LAYER","text":"

The Cross-Section Layer Table lists geometric information about each cross-section. This information is in the form of lookup tables of hydraulically important quantities such as area, width and wetted perimeter.

"},{"location":"manual/reference/Channels/#field-descriptions_2","title":"Field Descriptions","text":"

CHAN_NOChannel number in which cross-section is located.DISTFraction of distance from upstream node to downstream node where cross-section is locatedELEVElevation from bottom at which properties are known. The area, width, etc. apply to this elevation, and channel properties between elevations are linearly interpolated.AREAArea of channel from bottom to cross section(sq ft). Ignored if Area disagrees with the integral of WIDTH.WIDTHWidth of channel at top (ft).WET_PERIMWetted perimeter of channel at given elevation.

"},{"location":"manual/reference/Channels/#table-info_2","title":"Table Info","text":"

Identifier:CHAN_NO, DIST, ELEVParent Table:CHANNELParent Identifier:CHAN_NOInclude Block:GRID

"},{"location":"manual/reference/Channels/#examples","title":"Examples:","text":"

CHANNEL with XSECT_LAYER cross-section

# This example shows channels using cross-sections\n# In the XSECT_LAYER format. The benefit of this format\n# is that the input can all be put in one file.\n# This can be useful for archiving or echoing back input\n\n# CHANNEL SPECS\nCHANNEL\nCHAN_NO LENGTH MANNING DISPERSION UPNODE DOWNNODE\n1        15000   0.035        0.3      1        2 \n2        15000   0.035        0.3      2        3\nEND\n\n# This is a child table. Its rows must \"link\" to a parent\n# using the parent id (CHAN_NO in this case).\n# Note that two cross-sections are defined here,\n# one in each channel, halfway downstream, with three\n# layers each. \nXSECT_LAYER\nCHAN_NO DIST  ELEV   AREA WIDTH WET_PERIM\n1        0.5 -24.0    0.0  40.0      40.0 \n1        0.5   0.0  960.0  80.0     91.22 \n1        0.5  20.0 2640.0 160.0     133.6 \n2        0.5 -24.0    0.0  40.0      40.0 \n2        0.5   0.0  960.0  80.0     91.22 \n2        0.5  20.0 2640.0 160.0     133.6 \nEND\n

CHANNEL with XSECT (csdp) cross-section

# This example shows channels using cross-sections\n# In the XSECT format. The specification is not \n# complete -- we are really referring to \n# Cross-Section Development Program (CSDP) files\n# which are in their own format.\n\n# CHANNEL SPECS\nCHANNEL\nCHAN_NO LENGTH MANNING DISPERSION UPNODE DOWNNODE\n1        15000   0.035        0.3      1        2 \n2        15000   0.035        0.3      2        3\nEND\n\n# This is a child table. It is an alternative to the \n# XSECT_LAYER table (the two can co-exist, but you \n# should not mix input for a channel). The FILE column\n# points to a file that contains the real data which\n# would normally come out of the CSDP or other tool.\nXSECT\nCHAN_NO DIST     FILE\n1           0.5   1_0.50000.txt\n2           0.5   2_0.50000.txt\nEND\n
  • All channels must have an initial condition and at least one cross-section.
  • Older versions of DSM2 had the notion of a \"regular\" cross-section (meaning rectangular). In the current DSM2 this is just a cross-section with two layers.
  • Users frequently overspecify cross-sections, either by specifying more cross-sections longitudinally than the model can possibly use or by describing cross-sections vertically in such a way as to capture highly local features such as small constrictions, sills and undulations. DSM2 is commonly used with spatial resolution (delta x) of several thousand feet. You should only include features that are well resolved by this resolution, which means changes that persist over several miles. Even more importantly, you should avoid adjacent cross-sections with bottoms that vary greatly in elevation because they can cause odd behavior when cross-sections are interpolated to computation points. The bottom layers of cross sections should represent the \"overall\" slope of the channel.
"},{"location":"manual/reference/ENVVARS_Section/","title":"ENVVARS Section","text":""},{"location":"manual/reference/ENVVARS_Section/#overview","title":"Overview:","text":"

ENVVARs\u00a0are values used in text substitution elsewhere in the input. DSM2 attempts to replace any text that is preceded by a \"$\" and wrapped in curly braces: ${EXAMPLE}. By convention, these variables are always used in upper case. The substitution will be made from either system environmental variables or pseudo-environmental variables defined in this section. For instance, the SCALAR input section might indicate that run_start_time be set to ${START_TIME}. DSM2 will then search the system environmental variables and user-specified environmental variables for START_TIME and substitute the value (or print a warning if it finds nothing).

ENVVARs can be specified in text, or set by manipulating the command environmental variables. In production runs, many of the ENVVARs are set in a special file called the \"configuration\" file. Such a file is often included in the main input file using the CONFIGURATION include block.

"},{"location":"manual/reference/ENVVARS_Section/#reference","title":"Reference","text":""},{"location":"manual/reference/ENVVARS_Section/#keyword-descriptions","title":"Keyword Descriptions","text":""},{"location":"manual/reference/ENVVARS_Section/#name","title":"NAME","text":"

Name of the envvar. This is the alias that will be used elsewhere in the input system where the substitution is desired. For instance, if the NAME is START_TIME, ${START_TIME} would be used elsewhere.

"},{"location":"manual/reference/ENVVARS_Section/#value","title":"VALUE","text":"

Value assigned during substitution. For instance, for an ENVVAR with name START_TIME, a likely value would be\u00a0\"0000\".

"},{"location":"manual/reference/ENVVARS_Section/#table-info","title":"Table Info","text":""},{"location":"manual/reference/ENVVARS_Section/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/ENVVARS_Section/#include-block","title":"Include Block:","text":"

CONFIGURATION

"},{"location":"manual/reference/ENVVARS_Section/#examples","title":"Examples:","text":"

Definition and use:\u00a0The following example defines an ENVVAR section and then uses the variables later in a SCALAR section.

ENVVARS        \nNAME    VALUE   \nSTART_DATE  01JAN1990   # Runtime using envvars\nEND_DATE    01JAN2001   \nSTART_TIME  0000    \nEND_TIME    0000    \nEND\n\n\nSCALAR      \nmodel_name  historical_hydro    \nrun_start_date  ${START_DATE}   \nrun_end_date    ${END_DATE} \nrun_start_time  ${START_TIME}   \nrun_end_time    ${END_TIME} \nEND\n

Identifier:Table Info

NAME

"},{"location":"manual/reference/ENVVARS_Section/#parent-table","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/ENVVARS_Section/#include-block_1","title":"Include Block:","text":"

CONFIGURATION

  • ENVVARs can also be used on each other -- in text input that occurs after the ENVVAR definition.

    ENVVARS NAME VALUE DSM2MODIFIER historical_v81 #Study name used for DSM2 output

"},{"location":"manual/reference/ENVVARS_Section/#output","title":"Output","text":"

OUTPUTFILE ${DSM2MODIFIER}.dss

"},{"location":"manual/reference/ENVVARS_Section/#hydro","title":"hydro","text":"

HYDROOUTDSSFILE ${DSM2OUTPUTDIR}/${OUTPUTFILE} END

"},{"location":"manual/reference/Gate/","title":"Gate","text":""},{"location":"manual/reference/Gate/#overview","title":"Overview:","text":"

Gates\u00a0are sites that present a barrier or control on flow. A gate may have an arbitrary number of associated hydraulic devices (pipes and weirs), each of which may be operated independently to control flow.

The Gates View is primarily for specifying the physical properties of the gate and some simple operating modes. Gates that are operated simply can be completely specified in this table. Much more elaborate controls are possible using Gate Time Series and Operating Rules, and in addition to manipulating the hydraulic devices you can completely uninstall the gate.

"},{"location":"manual/reference/Gate/#tables","title":"Tables:","text":"
  • Gates
  • Gate Weir Devices
  • Gate Pipe Devices
"},{"location":"manual/reference/Gate/#gate_1","title":"GATE","text":"

The Gate table defines the name and connectivity of the gate. Gates are a top-level layered table.

"},{"location":"manual/reference/Gate/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Gate/#name","title":"NAME","text":"

Name of the gate. This is the identifier of the gate used elsewhere to refer to the gate.

"},{"location":"manual/reference/Gate/#from_obj","title":"FROM_OBJ","text":"

Type (channel/reservoir) of the water body to which the gate is attached. Gates are always connected from a water body to a node. This column is a picklist that is also connected to the Name/no. column.

"},{"location":"manual/reference/Gate/#from_identifier","title":"FROM_IDENTIFIER","text":"

Identifier (channel number or reservoir name) of the water body to which the gate is attached.

"},{"location":"manual/reference/Gate/#to_node","title":"TO_NODE","text":"

Node to which gate is attached.

"},{"location":"manual/reference/Gate/#table-info","title":"Table Info","text":"

Identifier:

"},{"location":"manual/reference/Gate/#name_1","title":"NAME","text":"

Parent Table:

"},{"location":"manual/reference/Gate/#gate_2","title":"GATE","text":"

Include Block:

"},{"location":"manual/reference/Gate/#grid","title":"GRID","text":""},{"location":"manual/reference/Gate/#gate_weir_device","title":"GATE_WEIR_DEVICE","text":"

This table lists hydraulic structures that exist at the gate site to control flow that resemble weirs or rectangular conduits. In this table, the user specifies physical properties of the device as well as default operations. Both employ the following formulas depending on whether the water surface is higher on the water body or node side of the gate:

Q = nCop_toCtoA(zwb, p)\u00a0sqrt[ 2g(zwb\u00a0- znode) ] ... zwb\u00a0> znode

Q = nCop_fromCfromA(znode, p)\u00a0sqrt[ 2g(znode\u00a0- zwb) ] ... zwb\u00a0\\< znode

Where:

  • n is the number of duplicate devices>
  • Cop_to\u00a0and Cop_from\u00a0are operating coefficients representing controls such as flap gates
  • Cto\u00a0and Cfrom\u00a0are coefficients representing the hydraulic efficiency of the gate
  • A is the area of flow depending on higher water surface and position p
  • g is gravity and
  • zwb\u00a0and znode\u00a0are the water surface elevations at the water body and node (node surface is assessed by means of a reference channel that has no gates attached to it).

Please see\u00a0usage notes\u00a0below

"},{"location":"manual/reference/Gate/#field-descriptions_1","title":"Field Descriptions","text":""},{"location":"manual/reference/Gate/#gate_name","title":"GATE_NAME","text":"

Name of the gate in which the device is located.

"},{"location":"manual/reference/Gate/#device","title":"DEVICE","text":"

Name of the device.

"},{"location":"manual/reference/Gate/#nduplicate","title":"NDUPLICATE","text":"

Number of exact duplicates, such as a number of similar pipes in parallel. Parameters such as width apply to a single one of the duplicates.

"},{"location":"manual/reference/Gate/#width","title":"WIDTH","text":"

Maximum width of the device (radius of a pipe, width of a weir).

"},{"location":"manual/reference/Gate/#elev","title":"ELEV","text":"

Invert elevation or weir crest.

"},{"location":"manual/reference/Gate/#height","title":"HEIGHT","text":"

Height of the device from the invert elevation. This can be used to represent the height of rectangular flashboards or of a radial gate. If the surface goes above this height, flow will be submerged. Use NA for an open top. If you click in an NA column, you will see that it is encoded using a large number, but you should only use 'NA' or a real height.

"},{"location":"manual/reference/Gate/#cf_from_node","title":"CF_FROM_NODE","text":"

Flow coefficient of the gate (0 \\< Cto\u00a0\\<= 1.0) describing the efficiency of the gate from node to water body. This parameter is the physical coefficient of flow. It should never be zero and should not be used to describe a control structure or operation such as flap gates or gate openings.

"},{"location":"manual/reference/Gate/#cf_to_node","title":"CF_TO_NODE","text":"

Same as CF_FROM_NODE, but for the direction from water body to node.

"},{"location":"manual/reference/Gate/#default_op","title":"DEFAULT_OP","text":"

Default operation mode. The gate operation is a \"magic\" parameter between 0.0 and 1.0 that modulates gate flow. Operating coefficients can be used to represent flap gates, fractions of duplicates operating or other physical controls. The default ops on this table are simple and are like initial conditions -- if you want more sophisticated control you will need to use a Gate Time Series or Operating Rule. Nevertheless, the defaults are enough to represent structures that are fully open or closed or operated unidirectionally. Here is how the default operation mode will affect the operating coefficient:

"},{"location":"manual/reference/Gate/#gate_open","title":"gate_open","text":"

Cop_to=1.0; Cop_from=1.0;

"},{"location":"manual/reference/Gate/#gate_close","title":"gate_close","text":"

Cop_to=0.0; Cop_from=0.0;

"},{"location":"manual/reference/Gate/#unidir_to","title":"unidir_to","text":"

Cop_to=1.0; Cop_from=0.0;

"},{"location":"manual/reference/Gate/#unidir_from","title":"unidir_from","text":"

Cop_to=0.0; Cop_from=1.0;

"},{"location":"manual/reference/Gate/#table-info_1","title":"Table Info","text":""},{"location":"manual/reference/Gate/#identifier","title":"Identifier:","text":"

GATE_NAME, DEVICE

"},{"location":"manual/reference/Gate/#parent-table","title":"Parent Table:","text":"

GATE

"},{"location":"manual/reference/Gate/#parent-identifier","title":"Parent Identifier:","text":"

GATE_NAME

"},{"location":"manual/reference/Gate/#include-block","title":"Include Block:","text":"

GRID

"},{"location":"manual/reference/Gate/#gate_pipe_device","title":"GATE_PIPE_DEVICE","text":"

This table lists pipes at the gate site. In this table, the user specifies physical properties of the device as well as default operations.

"},{"location":"manual/reference/Gate/#field-descriptions_2","title":"Field Descriptions","text":""},{"location":"manual/reference/Gate/#gate_name_1","title":"GATE_NAME","text":"

Name of the gate in which the device is located.

"},{"location":"manual/reference/Gate/#device_1","title":"DEVICE","text":"

Name of the device.

"},{"location":"manual/reference/Gate/#nduplicate_1","title":"NDUPLICATE","text":"

Number of exact duplicates, such as a number of similar pipes in parallel. Parameters such as width apply to a single one of the duplicates.

"},{"location":"manual/reference/Gate/#radius","title":"RADIUS","text":"

Maximum width of the device (radius of a pipe, width of a weir).

"},{"location":"manual/reference/Gate/#elev_1","title":"ELEV","text":"

Invert elevation or weir crest.

"},{"location":"manual/reference/Gate/#cf_from_node_1","title":"CF_FROM_NODE","text":"

Flow coefficient of the gate (0 \\< Cto\u00a0\\<= 1.0) describing the efficiency of the gate from node to water body. This parameter is the physical coefficient of flow. It should never be zero and should not be used to describe a control structure or operation such as flap gates or gate openings.

"},{"location":"manual/reference/Gate/#cf_to_node_1","title":"CF_TO_NODE","text":"

Same as CF_FROM_NODE, but for the direction from water body to node.

"},{"location":"manual/reference/Gate/#default_op_1","title":"DEFAULT_OP","text":"

Default operation mode. The gate operation is a \"magic\" parameter between 0.0 and 1.0 that modulates gate flow. Operating coefficients can be used to represent flap gates, fractions of duplicates operating or other physical controls. The default ops on this table are simple and are like initial conditions -- if you want more sophisticated control you will need to use a Gate Time Series or Operating Rule. Nevertheless, the defaults are enough to represent structures that are fully open or closed or operated unidirectionally. Here is how the default operation mode will affect the operating coefficient:

"},{"location":"manual/reference/Gate/#gate_open_1","title":"gate_open","text":"

Cop_to=1.0; Cop_from=1.0;

"},{"location":"manual/reference/Gate/#gate_close_1","title":"gate_close","text":"

Cop_to=0.0; Cop_from=0.0;

"},{"location":"manual/reference/Gate/#unidir_to_1","title":"unidir_to","text":"

Cop_to=1.0; Cop_from=0.0;

"},{"location":"manual/reference/Gate/#unidir_from_1","title":"unidir_from","text":"

Cop_to=0.0; Cop_from=1.0;

"},{"location":"manual/reference/Gate/#table-info_2","title":"Table Info","text":""},{"location":"manual/reference/Gate/#identifier_1","title":"Identifier:","text":"

GATE_NAME, DEVICE

"},{"location":"manual/reference/Gate/#parent-table_1","title":"Parent Table:","text":"

GATE

"},{"location":"manual/reference/Gate/#parent-identifier_1","title":"Parent Identifier:","text":"

GATE_NAME

"},{"location":"manual/reference/Gate/#include-block_1","title":"Include Block:","text":"

GRID

"},{"location":"manual/reference/Gate/#several-types-of-time-series-and-operational-controls-can-be-placed-on-gates","title":"Several types of time series and operational controls can be placed on gates","text":"
  • At least one channel at every node must be ungated.

  • Gates can be removed using an operation rule that sets the gate's\u00a0install variable\u00a0to zero. Gates that are uninstalled behave like normal nodes with equal water surface constraints between them. Operations and time series that manipulate the device operating coefficients and positions will be applied, but the devices will be totally ignored in computations to determine flow.

  • Gates can be controlled by a number of variables that are time-varying and controlled by time series or operating rules:

    install

    • Install applies to the whole gate, not individual devices. When the gate is uninstalled (install=0) the gate ceases to exist, none of its devices are applied (although they continue to exist in the background). The gate is totally replaced by an equal-stage compatibility condition.
  • op_to_node

    • Operating coefficient in the direction from water body to node.
  • op_from_node

    • Operating coefficient in the direction from node to water body.
  • op

    • Operating coefficient in both directions. This is just a convenience combo of the individual to/from node versions. It is write-only in operating rules, because it combines two variables and there is no single value that can be read.
  • position

    • Physical operating position whose interpretation depends on the Position Control setting of the gate device. This is now deprecated in favor of more direct manipulation of things like gate elevation.
  • elev

    • Weir crest or pipe invert elevation. This can represent evolution over time or a bottom-operating structure.
  • width

    • Weir width or pipe radius. This usually represents evolution over time.
  • height

    • Weir gate height, width of a flashboard. This can represent evolution over time or a top-operating structure like a radial gate.
"},{"location":"manual/reference/Groups/","title":"Groups","text":""},{"location":"manual/reference/Groups/#overview","title":"Overview","text":"

GROUPS\u00a0are user-defined groups of model objects, for instance groups of water bodies or groups of boundary inputs. Groups are used a number of places in DSM2, including: tracking of constituents originated from grouped sources, tracking of particles as they reside or move between groups of water bodies and/or boundaries, and assignment of rate coefficients in QUAL to groups of water bodies. In each context, the types of model objects that are allowed in the groups may be slightly different. That validation takes place elsewhere in the object using the group.

"},{"location":"manual/reference/Groups/#tables","title":"Tables","text":"
  • Groups
  • Group Members
"},{"location":"manual/reference/Groups/#group","title":"GROUP","text":"

The GROUP table defines the name of a group. It has one column!!! The reason we do this is to provide a top level table for overriding and redefining groups in the layering system.

"},{"location":"manual/reference/Groups/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Groups/#name","title":"NAME","text":"

Name of the group. This is the identifier for the group used in references elsewhere in the input system.

"},{"location":"manual/reference/Groups/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Groups/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Groups/#include-block","title":"Include Block:","text":"

GROUPS

"},{"location":"manual/reference/Groups/#group_member","title":"GROUP_MEMBER","text":"

The Group Members Table lists members of the parent group. The group members are identified using patterns written using regular expression and a special syntax for ranges of numbers. If this sounds like nonsense -- don't worry. The examples should cover most of the important ways you would want to define group members. Note also that you can use multiple rows to define the group -- the result will be the union of the members from the individual rows.

"},{"location":"manual/reference/Groups/#field-descriptions_1","title":"Field Descriptions","text":""},{"location":"manual/reference/Groups/#member_type","title":"MEMBER_TYPE","text":"

The type (channel, etc) of model object.

"},{"location":"manual/reference/Groups/#identifierpattern","title":"Identifier/Pattern","text":"

A pattern that will be matched against the identifier of the object (channel number, input name, etc). The pattern can be a regular expression or use the special range notation.

Here are some examples:

range:132-176\n

Matches any number in this range, inclusive.

dicu_drn_.*\n

Dot-star is a wildcard that matches any name that starts with dicu_drn.

mtz\n

Exact name

(183|184|185)\n

A choice of number identifiers

(mtz|sjr)\n

A choice of names.

14[2-7]\n

The regular expression way of doing ranges, which works for a single digit

"},{"location":"manual/reference/Groups/#table-info_1","title":"Table Info","text":""},{"location":"manual/reference/Groups/#identifier_1","title":"Identifier:","text":"

GROUP_NAME,MEMBER_TYPE,PATTERN

"},{"location":"manual/reference/Groups/#parent-table","title":"Parent Table:","text":"

GROUP

"},{"location":"manual/reference/Groups/#parent-identifier","title":"Parent Identifier:","text":"

GROUP_NAME

"},{"location":"manual/reference/Groups/#include-block_1","title":"Include Block:","text":"

GROUPS

  • A regular expressions description can be found in\u00a0wikipedia\u00a0and a tutorial and guide can be found at\u00a0http://www.regular-expressions.info/. You can probably do most of the group matching you want by modifying the above sample patterns, but\u00a0the possibilities are endless.
"},{"location":"manual/reference/IO_Files/","title":"IO Files","text":""},{"location":"manual/reference/IO_Files/#overview","title":"Overview:","text":"

The\u00a0IO_FILES\u00a0table is where you declare most of the non-dss output from a simulation, including echoed text output files, restart files and output tidefiles from HYDRO (input tidefiles are specified for QUAL and PTM in the\u00a0TIDEFILE\u00a0section). IO_FILES can only be specified in the main text input file (hydro.inp, qual.inp, ptm.inp).

"},{"location":"manual/reference/IO_Files/#tables","title":"Tables:","text":""},{"location":"manual/reference/IO_Files/#io_file","title":"IO_FILE","text":""},{"location":"manual/reference/IO_Files/#keyword-descriptions","title":"Keyword Descriptions","text":""},{"location":"manual/reference/IO_Files/#model","title":"MODEL","text":"

Model generating the file. For a restart file this should be the model (hydro|qual) that is being restarted. For echoed output use the keyword \"output\".

"},{"location":"manual/reference/IO_Files/#type","title":"TYPE","text":"

Type of file: hdf5, restart,\u00a0 or \"none\" for echoed output.

"},{"location":"manual/reference/IO_Files/#io","title":"IO","text":"

Type of file \"in\" \"out\" or \"none\" for echoed output.

"},{"location":"manual/reference/IO_Files/#interval","title":"INTERVAL","text":"

Interval, for hdf5 tidefile output.

"},{"location":"manual/reference/IO_Files/#filename","title":"FILENAME","text":"

Name of file. Should have a suitable extension:\u00a0*.hrf\u00a0for hydro restart file,\u00a0*.qrf\u00a0for qual restart file,\u00a0*.h5\u00a0for hdf5 tidefile or\u00a0*.out\u00a0for echoed output.

"},{"location":"manual/reference/IO_Files/#table-info","title":"Table Info","text":""},{"location":"manual/reference/IO_Files/#identifier","title":"Identifier:","text":"

none: no layering

"},{"location":"manual/reference/IO_Files/#include-block","title":"Include Block:","text":"

none: launch file only

"},{"location":"manual/reference/IO_Files/#examples","title":"Examples:","text":""},{"location":"manual/reference/IO_Files/#hydro-example","title":"HYDRO example:","text":"

This example includes standard hydro runtime output file, a restart output file that is regenerated every model day (overwriting the previous day's file), an hdf5 tidefile for passing information to QUAL and an echo file (replicate of input). All of the file names use text substitution -- the value would come from an environmental variable, ENVVARS section in the input file or ENVVARS section of a config file.

IO_FILES      \nMODEL  TYPE     IO    INTERVAL FILENAME  \noutput none     none  none     ${HYDROOUTFILE}  \nhydro  restart  out   1day     ${QUALRSTFILE}  \nhydro  hdf5     out   1hour    ${HYDROHDF5FILE}  \nhydro  echo     out   none     ${DSM2MODIFIER}_hydro_echo.inp  \nEND\n
"},{"location":"manual/reference/IO_Files/#qual-example","title":"QUAL example:","text":"

This example includes a general qual runtime output file, a restart output file that is regenerated every model day (overwriting the previous day's file), a restart file that will be used to generate the initial condition for the run, and an hdf5 tidefile for passing information to QUAL and an echo file (exact replicate of input).

IO_FILES      \nMODEL  TYPE    IO   INTERVAL FILENAME  \noutput none    none none     ${QUALOUTFILE}  \nqual   restart out  1day     ${QUALRESTART}  \nqual   restart in   none     qualinit_30SEP1999.qrf  \nqual   hdf5    out  1hour    ${QUALHDF5FILE} \nqual   echo    out  none     ${DSM2MODIFIER}_qual_echo.inp  \nEND\n
"},{"location":"manual/reference/IO_Files/#ptm-example","title":"PTM example:","text":"

This example includes a PTM trace file (which is required to produce flux DSS output) and an animation file (which is required for animated output).

IO_FILES      \nMODEL TYPE  IO  INTERVAL FILENAME  \nptm   trace out none     ${DSM2OUTPUTDIR}/trace.out  \nptm   anim  out 15min    ${DSM2OUTPUTDIR}/anim.out\nptm   echo  out none     ${DSM2MODIFIER}_ptm_echo.inp  \nEND\n

The runtime output file from HYDRO is used in the preparation of PTM visualization tools.

"},{"location":"manual/reference/Input_Climate/","title":"Input Climate","text":""},{"location":"manual/reference/Input_Climate/#overview","title":"Overview:","text":"

Climate inputs are time series assignments to climate variables used in non-conservative constituent runs.

"},{"location":"manual/reference/Input_Climate/#tables","title":"Tables:","text":"
    • INPUT_CLIMATE
"},{"location":"manual/reference/Input_Climate/#input_climate","title":"INPUT_CLIMATE","text":"

Climate input assigns time-varying properties to climate\u00a0parameters. The table assigns a time series data source.

"},{"location":"manual/reference/Input_Climate/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Input_Climate/#name","title":"NAME","text":"

Name of the input, used for layering.

"},{"location":"manual/reference/Input_Climate/#variable","title":"VARIABLE","text":"

The variable that is set by this assignment.

"},{"location":"manual/reference/Input_Climate/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model. See\u00a0fillin types

"},{"location":"manual/reference/Input_Climate/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word\u00a0constant\u00a0if you would like to assign a constant value to the input (the value will be entered in the next column).

PATH: The path within the text or DSS file of the time series data. If you used the\u00a0constant\u00a0keyword in the Input File column, enter the value (e.g.\u00a04.22) here.

"},{"location":"manual/reference/Input_Climate/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Input_Climate/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Input_Climate/#include-block","title":"Include Block:","text":"

QUAL_TIME_SERIES

"},{"location":"manual/reference/Input_Gate/","title":"Input Gate","text":""},{"location":"manual/reference/Input_Gate/#overview","title":"Overview:","text":"

Gate inputs are time series assignments to gate structure physical and operational parameters.

"},{"location":"manual/reference/Input_Gate/#tables","title":"Tables:","text":"
    • INPUT_GATE
"},{"location":"manual/reference/Input_Gate/#input_gate","title":"INPUT_GATE","text":"

A gate input assigns time-varying properties to\u00a0gate\u00a0parameters. The table assigns a time series data source.

Gate paths in DSS should be of data type INST-VAL as opposed to PER-AVER (which provides for better viewing in HECDSSVue); otherwise it is possible that the gate does not operate as expected.

"},{"location":"manual/reference/Input_Gate/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Input_Gate/#gate_name","title":"GATE_NAME","text":"

This must be the same as the name of the gate.

"},{"location":"manual/reference/Input_Gate/#device","title":"DEVICE","text":"

This must be the same as the name of the gate device. Generally all the variables except \"install\" are device specific. If the variable is \"install\" set the device to \"none\".

"},{"location":"manual/reference/Input_Gate/#variable","title":"VARIABLE","text":"

The variable that is set by this assignment.

"},{"location":"manual/reference/Input_Gate/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model. See\u00a0fillin types

"},{"location":"manual/reference/Input_Gate/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word\u00a0constant\u00a0if you would like to assign a constant value to the input (the value will be entered in the next column).

"},{"location":"manual/reference/Input_Gate/#path","title":"PATH","text":"

The path within the text or DSS file of the time series data. If you used the\u00a0constant\u00a0keyword in the Input File column, enter the value (e.g.\u00a04.22) here.

"},{"location":"manual/reference/Input_Gate/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Input_Gate/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Input_Gate/#include-block","title":"Include Block:","text":"

HYDRO_TIME_SERIES

"},{"location":"manual/reference/Input_Transfer_Flow/","title":"Input Transfer Flow","text":""},{"location":"manual/reference/Input_Transfer_Flow/#overview","title":"Overview:","text":"

Transfer Flows are flow time series assignments to pre-defined mass transfers.

"},{"location":"manual/reference/Input_Transfer_Flow/#tables","title":"Tables:","text":""},{"location":"manual/reference/Input_Transfer_Flow/#input_transfer_flow","title":"INPUT_TRANSFER_FLOW","text":"

The transfer flow table assigns time series flows to\u00a0transfers. The table assigns a time series data source to the boundary condition.

"},{"location":"manual/reference/Input_Transfer_Flow/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Input_Transfer_Flow/#transfer_name","title":"TRANSFER_NAME","text":"

This must be the same as the name of the transfer.

"},{"location":"manual/reference/Input_Transfer_Flow/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model. See\u00a0fillin types

"},{"location":"manual/reference/Input_Transfer_Flow/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word\u00a0constant\u00a0if you would like to assign a constant value to the input (the value will be entered in the next column).

"},{"location":"manual/reference/Input_Transfer_Flow/#path","title":"PATH","text":"

The path within the text or DSS file of the time series data. If you used the\u00a0constant\u00a0keyword in the Input File column, enter the value (e.g.\u00a04.22) here.

"},{"location":"manual/reference/Input_Transfer_Flow/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Input_Transfer_Flow/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Input_Transfer_Flow/#include-block","title":"Include Block:","text":"

HYDRO_TIME_SERIES

Only one flow (and no concentration) can be assigned to a transfer.

"},{"location":"manual/reference/Layers/","title":"Layers","text":""},{"location":"manual/reference/Layers/#overview","title":"Overview:","text":"

DSM2 batches input data into files or \"layers\" in order to achieve the following goals:

  • To group input into cohesive packages with similar content (examples: the standard grid, sdip operating rules).
  • To identify which items are changed when a new group of inputs is added to an existing simulation.

For example, consider the two layers of channels in the figure below. The first layer defines seven channels and would have seven entries in the CHANNEL table. This might represent a \"base\" grid. The second layer changes the properties of Channel 2, adds a Channel 8 and removes Channel 7. The second layer will have only three entries, shown in red. These entries represent the changes relative to Layer 1, and presumably are thematically related.

"},{"location":"manual/reference/Layers/#example-channel","title":"Example: Channel","text":"

Consider the above example using text input. We are going to create CHANNEL\u00a0tables representing the channel connectivity, and assume the geometry is provided with CSDP style cross-sections listed in an\u00a0XSECT\u00a0table (child items are always associated with parent items in the same file).

The base data will be in a file\u00a0channel_base.inp:

channel_base.inp

CHANNEL\nCHAN_NO LENGTH MANNING DISPERSION UP_NODE DOWN_NODE\n1        18000   0.030       0.80       1         2\n2         8000   0.040       0.80       2         3\n3        18000   0.040       0.80       3         4\n4        18000   0.040       0.80       4         5\n5        18000   0.040       0.80       3         5\n6        22000   0.040       0.80       5         6\n7        14000   0.040       0.80       6         7\nEND\n\nXSECT\nCHAN_NO   DISTANCE    FILE\n1         0.200       1_0_200.txt\n1         0.800       1_0_800.txt\n2         0.500       2_0_500.txt\n...\n7         0.900       7_0_900.txt\nEND\n

The revisions are in\u00a0channel_revise.inp:

channel_revise.inp

CHANNEL\nCHAN_NO LENGTH MANNING DISPERSION UP_NODE DOWN_NODE\n2         8000   0.030       0.80       2         3 # Masks + Alters\n#3        9000   0.000       0.00      19        20 # Has no effect\n^7       14000   0.040       0.80       6         7 # Masks + Deletes\n...\n8        16000   0.040       0.80       8         3 # Adds\nEND\n\nXSECT\nCHAN_NO DISTANCE  FILE\n2          0.100  2_0_500.txt  # Masks lower level x-sects \n2          0.700  2_0_500.txt  #\n7          0.900  7_0_900.txt  # Will be ignored\n8          0.500  8_0_500.txt  # \nEND\n

The two layers are managed by the model input file that is given directly to the model, in this case\u00a0hydro_layering.inp. The two channel files are listed in a GRID include block that lists the layers in increasing priority.

hydro.inp

GRID\nchannel_base.inp\nchannel_revise.inp\nEND\n

Now lets consider the details...

"},{"location":"manual/reference/Layers/#include-blocks","title":"Include Blocks","text":"

Include blocks are input blocks in the master file that list other files. The data from these other files is \"included\" in the order listed. Priority is given to files read later, and these are assigned a higher \"layer number\"

Include blocks can only contain specific types of input data. For instance, a GRID input block only contains channel, gate, reservoir and transfer physical specifications (not boundary conditions attached to them). So the trick to using include blocks is knowing, say, that a CHANNEL table belongs in a file in a GRID include block and a BOUNDARY_FLOW table belongs in a file in a HYDRO_TIME_SERIES block. In the reference documentation, the include blocks should be listed for each table in the\u00a0Table Information\u00a0section.

The only exception is the master file that is the one sent to the model on the command line (often named something like hydro.inp, qual.inp, ptm.inp). Data in this file always take precedence over other input.

"},{"location":"manual/reference/Layers/#layer-overriding","title":"Layer Overriding","text":"

Layer overriding occurs when the same data item is defined in multiple layers (files) in the same model. Files that are read later are given a higher \"layer number\" and take precedence over files read earlier. Within the same file it is an error to redefine an entry.

"},{"location":"manual/reference/Layers/#identifiers","title":"Identifiers","text":"

To use layering, you have to know what constitutes redefining an entry. Whether two items represent the same data item depends on the identifier for the table, which is some combination of columns that uniquely identify the item using a name or number. Identifiers for each table are listed in the reference documents. In the above example it is channel number CHAN_NO. The trickiest identifiers are in the output, because they involve two (NAME, VARIABLE) or three (NAME, VARIABLE,SOURCE_NAME) columns. In the reference documentation, the identifier is listed for each table in the\u00a0Table Information\u00a0section.

"},{"location":"manual/reference/Layers/#parent-child-tables","title":"Parent-child Tables","text":"

When parent-child tables are present in a file (e.g., Channels, Cross Section, Cross Section Layer), overriding is assessed at the level of the parent or\u00a0top-level table. When you override on a top-level table, its child table information is completely replaced as well. So, for instance, the cross-section at Channel 2 Distance 0.500 in channel_base.inp in the example is completely ignored. The model makes no attempt to \"mix it in\" with the replacement version of Channel 2.

Child tables must be in the same file as their parent tables. This is a departure from earlier versions of DSM2, but is necessary to make layering well-defined.

"},{"location":"manual/reference/Layers/#deleting-lower-level-data","title":"Deleting lower level data","text":"

Occasionally, the motivation for overriding an item is to eliminate it. You can do this on any top-level table by prepending a caret ^ at the beginning of the line. This will remove items on lower levels with the same identifier. Note that it doesn't matter what other data you put in the parent fields (you do need placeholders). Also you needn't add child information if the only reason for the parent entry is to delete it -- but sometimes it is nice to have the child data there if you are toggling back and forth.

Deleting data is quite different from commenting it out (using a # sign). Commenting data out on a high level would merely mean that the input reader would skip over the line. It would not affect any data with the same identifier on a lower level.

"},{"location":"manual/reference/Manual/","title":"Contents","text":"
  • Overview
  • Getting Started\u00a0
    • Download and installation
    • Recommended third party extras
    • Test Launching DSM2
  • Layers, Priority, Data Management
  • Operating Rules
  • Reference
"},{"location":"manual/reference/Node_Concentration/","title":"Node Concentration","text":""},{"location":"manual/reference/Node_Concentration/#overview","title":"Overview:","text":"

Node concentration represents the concentration of constituents attached to boundary inflows and sources.

"},{"location":"manual/reference/Node_Concentration/#tables","title":"Tables:","text":""},{"location":"manual/reference/Node_Concentration/#node_concentration","title":"NODE_CONCENTRATION","text":"

The NODE_CONCENTRATION table attaches concentrations to boundary and source flows defined in HYDRO. The table also assigns a time series to the source.

"},{"location":"manual/reference/Node_Concentration/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Node_Concentration/#name","title":"NAME","text":"

Name assigned to the source. An entry here must have the same name as an entry in the BOUNDARY_STAGE, BOUNDARY_FLOW or SOURCE_FLOW tables -- by matching names you will attach concentrations to the flow.

"},{"location":"manual/reference/Node_Concentration/#node_no","title":"NODE_NO","text":"

Node number where the flow is applied. This must match the node number given in the original flow table (it is a bit redundant, but easier to look things up).

"},{"location":"manual/reference/Node_Concentration/#variable","title":"VARIABLE","text":"

Constituent name. If no output is requested for the constituent currently it will be ignored.

"},{"location":"manual/reference/Node_Concentration/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model.

"},{"location":"manual/reference/Node_Concentration/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word\u00a0constant\u00a0if you would like to assign a constant value to the input (the value will be entered in the next column).

"},{"location":"manual/reference/Node_Concentration/#path","title":"PATH","text":"

The path within the text or DSS file of the time series data. If you used the\u00a0constant\u00a0keyword in the Input File column, enter the value (e.g.\u00a04.22) here.

"},{"location":"manual/reference/Node_Concentration/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Node_Concentration/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Node_Concentration/#include-block","title":"Include Block:","text":"

QUAL_TIME_SERIES

Multiple sources and sinks can be assigned to a node. They are often kept separate in order to assign different concentrations to them

"},{"location":"manual/reference/Operating_Rule/","title":"Operating Rule","text":""},{"location":"manual/reference/Operating_Rule/#overview","title":"Overview:","text":"

Operating rules are user-written rules that manipulate model inputs such as gate operations, boundary flows based on observations of the current state of the running model. Operating rules are documented in detail in the\u00a0DSM2 Op Rule Guide. The Operating Rules table lists the time series, expressions and rule definitions.

"},{"location":"manual/reference/Operating_Rule/#tables","title":"Tables:","text":"
  • OPERATING RULE
  • OPRULE_TIME_SERIES
  • OPRULE_EXPRESSION
"},{"location":"manual/reference/Operating_Rule/#operating_rule","title":"OPERATING_RULE","text":"

Defines the name, action and trigger of the operating rule.

"},{"location":"manual/reference/Operating_Rule/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Operating_Rule/#name","title":"NAME","text":"

Name of the operating rule. This is the identifier of the rule.

"},{"location":"manual/reference/Operating_Rule/#action","title":"ACTION","text":"

Definition of the action to be taken when the trigger transitions from FALSE to TRUE.

"},{"location":"manual/reference/Operating_Rule/#trigger","title":"TRIGGER","text":"

Trigger that activates the rule when it transitions from FALSE to TRUE. If the trigger is NULL it will become the trivial TRUE trigger, which is assumed to make a transition from FALSE to TRUE at startup (it is not \"always\" active).

"},{"location":"manual/reference/Operating_Rule/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Operating_Rule/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Operating_Rule/#parent-table","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/Operating_Rule/#include-block","title":"Include Block:","text":"

OPERATIONS

"},{"location":"manual/reference/Operating_Rule/#oprule_time_series","title":"OPRULE_TIME_SERIES","text":"

This table lists time series that are used in forming action and trigger definitions. The table is not a child table -- it is a top-level layered table.

"},{"location":"manual/reference/Operating_Rule/#field-descriptions_1","title":"Field Descriptions","text":""},{"location":"manual/reference/Operating_Rule/#name_1","title":"NAME","text":"

Name assigned to the time series. This is the identifier of the series. It is also the name used to refer to the series in expressions.

"},{"location":"manual/reference/Operating_Rule/#fillin","title":"FILLIN","text":"

Method used to interpolate when the model time step is finer than the time series time step. Use \"last\" to use the last time stamp in the period (a HEC-DSS convention) and \"linear\" to interpolate linearly

"},{"location":"manual/reference/Operating_Rule/#file","title":"FILE","text":"

Input \u00a0file (HEC-DSS or text file in HEC-DSS format) storing the time series or the word\u00a0constant\u00a0if the series is assigned a fixed value.

"},{"location":"manual/reference/Operating_Rule/#path","title":"PATH","text":"

HEC-DSS path of the data within the Input File or the value (e.g.\u00a02.0) if the series is assigned a fixed value.

"},{"location":"manual/reference/Operating_Rule/#table-info_1","title":"Table Info","text":""},{"location":"manual/reference/Operating_Rule/#identifier_1","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Operating_Rule/#parent-table_1","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/Operating_Rule/#include-block_1","title":"Include Block:","text":"

OPERATIONS

"},{"location":"manual/reference/Operating_Rule/#oprule_expression","title":"OPRULE_EXPRESSION","text":"

This table allows the user to list expressions that can be reused later in operating rule actions and triggers. Expressions cannot depend on other expressions. Expressions are not a child table -- the table is a top-level layered table.

"},{"location":"manual/reference/Operating_Rule/#field-descriptions_2","title":"Field Descriptions","text":""},{"location":"manual/reference/Operating_Rule/#name_2","title":"NAME","text":"

Name of the expression. This is the identifier of the expression. It is also the name used to refer to the expression in expressions.\u00a0

"},{"location":"manual/reference/Operating_Rule/#definition","title":"DEFINITION","text":"

Definition of the expression -- this will be a formula involving model variables, seasons and time series. The time series can be from the above time series table or elsewhere in the Input Time Series section. Please see the Operating Rules Guide for more details.

"},{"location":"manual/reference/Operating_Rule/#table-info_2","title":"Table Info","text":""},{"location":"manual/reference/Operating_Rule/#identifier_2","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Operating_Rule/#parent-table_2","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/Operating_Rule/#include-block_2","title":"Include Block:","text":"

OPERATIONS

  • Numerous usage comments in the Operating Rules Guide.
  • Time series referenced in the operating rules may be defined in an OPRULE_TIME_SERIES table or they may be time series defined elsewhere, such as the name of a boundary flow.
  • Neither the OPRULE_EXPRESSION or OPRULE_TIME_SERIES table is a child table of OPERATING_RULE. However, it is common to put related items in the same file.
"},{"location":"manual/reference/Operating_Rule_Guide/","title":"Operating Rule Guide","text":""},{"location":"manual/reference/Operating_Rule_Guide/#operating-rule-guide_1","title":"Operating Rule Guide","text":""},{"location":"manual/reference/Operating_Rule_Guide/#introduction","title":"Introduction","text":"

DSM2 uses a text language for operating rules, and the rules are stored in the database. Operating rules combine trigger and action directives, each of which is an expression based on observed model states, seasonal information and exogenous time series input as well as other expressions.

Actions are things the operating rule does. In DSM2-DB, the actions affect either gate devices or source/sink flow boundaries. For gate devices the operating flow coefficient can be changed. For sources and sinks, flow may be set to a new constant value or a new time series. Expressions for actions tend to be of the form:

SET model_object TO numerical_expression\n

The action becomes applicable when a corresponding trigger goes from false to true. Triggers are written with expressions that evaluate true or false:

chan_stage(channel=132, dist=1000) < -0.1\n

Some rules are immediate responses to model conditions (close the gate when stage dips below 0.5). Other rules use triggers to describe seasons or situations where the action is applicable (reduce a boundary flow when the month is between May and September). Still other rules apply from the beginning of the run and the trigger column is just a nuisance.

"},{"location":"manual/reference/Operating_Rule_Guide/#expressions","title":"Expressions","text":"

An expression is just a named quantity that is derived from model data, outside time series data, math and time functions. An example of a simple numerical expression based on current DSM2-DB flow looks like this:

ebb := chan_flow(channel=132, dist=1000) > 0.01\n

This example samples the current time step model flow 1,000 ft downstream of the upstream node in channel 132 and checks whether it is greater than 0.01 cfs. The expression assigns the answer the name ebb, so it can be reused in later expressions. Note that ebb is a logical expression which evaluates to true or false depending on the model time step. Numerical expressions will be introduced shortly.

Assignments of named expressions always start with a name the assignment operator \u201c:=\u201d. Spaces around the assignment and greater-than operators are optional. The assignment operator isn\u2019t actually used in the GUI, because there is a separate column for the name and definition.

The chan_flow part of the expression represents the value of a model variable. Model variables typically require identifiers, which are included in parenthesis and are a comma-separated list with elements that depend on the context (see the section below on DSM2 model variable identifiers). These identifiers can be numerical or text strings:

chan_flow(channel=132, dist=1000) ...numerical\n\ngate_op(gate=middle_river_barrier, device=weir) ...strings\n

The examples thus far have been logical expressions. Logical expressions usually appear in triggers rather than actions. Besides logical expressions, expressions that evaluate to numerical values can be defined:

ebbmagnitude := log(chan_flow(channel=132, dist=1000))\n

and expressions can also involve simple math operators. For instance:

ebbmagnitude := log(chan_flow(channel=132, dist=1000))\n

is an expression that evaluates flow, applies the log function to it and then assigns it to the variable name ebbmagnitude. For details, see the section below on Math Operators.

Model time can also be used in expressions. The following expression describes the VAMP season for San Joaquin river management:

vamp := (MONTH == APR) or (MONTH == MAY)\n

The definition could also include the date, day of the month, or time of day.

Finally, the following example combines a model state (stage/water surface) observation, an external time series (called tide_level) and some simple arithmetic. The expression might be used with a slowly fluctuating tide or sea level datum to provide an idea of critical stage in the South Delta compared to ambient tide conditions.

critical_stage := chan_stage(channel=132,dist=1000)<(tide_level-1.0)\n
"},{"location":"manual/reference/Operating_Rule_Guide/#operating-rules","title":"Operating Rules","text":"

It is now straightforward to use expressions in operating rules. The following example is based on expressions that were developed above. Bold face words correspond to tables or columns of the GUI.

"},{"location":"manual/reference/Operating_Rule_Guide/#name","title":"Name","text":"
middle_vamp_ebb\n
"},{"location":"manual/reference/Operating_Rule_Guide/#expressions_1","title":"Expressions","text":"
ebb := chan_flow(channel=132, dist=1000) > 0.01\nvamp := (month == Apr) OR (month == May)\n
"},{"location":"manual/reference/Operating_Rule_Guide/#trigger","title":"Trigger","text":"
vamp AND ebb\n
"},{"location":"manual/reference/Operating_Rule_Guide/#action","title":"Action","text":"
SET gate_op(gate=middle_river_barrier, device=weir) TO ts(new_time_series)\n

The middle_vamp_ebb operating rule lies dormant until the first time step when vamp and ebb (a compound expression based on the expressions vamp and ebb) becomes true. At that point the action will be taken and the weir operating coefficient will start to operate according to the values in the DSS time series new_time_series. Note that except for the expression definitions, the parts of this operating rule can be united using the name assignment (:=) and WHERE directives:

middle_vamp_ebb := SET gate_op(gate= middle_river_barrier,device = weir) TO ts(new_time_series) WHERE (vamp AND ebb)\n

This is the form of the operating rule that would be used, say, when parsing a text file rather than using the GUI.

"},{"location":"manual/reference/Operating_Rule_Guide/#prediction","title":"Prediction","text":"

Anticipation using linear or quadratic extrapolation can be added to numerical expressions in expressions using the PREDICT function. What is nice about PREDICT is that it allows trigger expressions to more accurately express the intent of a rule, because you don't need \"buffers\" which are confusing and inaccurate.

For instance, let's say you want to take some action like close a gate to protect stage in channel 206 in the South Delta from going below zero. If you use a buffer, you write the following:

SET [some action] WHEN chan_stage(chan=206, dist=0) < 1;\n

This is confusing because the value \"1\" is used as the trigger criterion when the intent has to do with stage of 0 and not 1. It is inaccurate because it will go off no matter what the trend is. With anticipation, the same rule would look like this:

SET [some action] WHEN PREDICT(chan_stage(chan=206, dist=0),LINEAR, 30MIN) < 0;\n

This states the trigger clearly in terms of the value 0. It is also much less likely to go off by accident, because the time trend is used (stage going below 1 is not significant if it is dropping very slowly and not likely to make it to 0). In addition to LINEAR extrapolation quadratic predictions are available using QUAD as the second argument to PREDICT. Over time periods of less than an hour (and not right next to a gate or reservoir), quadratic interpolation is markedly more accurate than linear.

"},{"location":"manual/reference/Operating_Rule_Guide/#ramp-transition","title":"RAMP (transition)","text":"

For actions, there is also a way to smooth time. The keyword RAMP after an action (together with a number of minutes) will transition in the action gradually, if such a transition makes physical sense.

For instance, a ramping version of middle_vamp_ebb might use the definition for ebb:

SET gate_op( gate=middle_r_barrier, device=radial) TO ts(new_time_series) RAMP 60min\n
"},{"location":"manual/reference/Operating_Rule_Guide/#complementary-triggers-and-ifelse","title":"Complementary Triggers and IFELSE","text":"

Often, an operating rule is paired with a complementary rule that will reverse its action. For instance, to complement the above rule for ebb flow the following operating rule for flood flow might be added:

"},{"location":"manual/reference/Operating_Rule_Guide/#name_1","title":"Name","text":"

middle_vamp_flood

"},{"location":"manual/reference/Operating_Rule_Guide/#expressions_2","title":"Expressions","text":"
flood := chan_flow(channel=132, dist=1000) < -0.01\nvamp := (month == Apr) or (month == May)\n
"},{"location":"manual/reference/Operating_Rule_Guide/#trigger_1","title":"Trigger","text":"
vamp and flood\n
"},{"location":"manual/reference/Operating_Rule_Guide/#action_1","title":"Action","text":"
SET gate_op( gate=middle_r_barrier, device=barrier,direction=to_node) TO old_time_series\n

This rule effectively undoes the ebb action. The example underscores a necessary but somewhat unintuitive point about triggers: they are one-time and unidirectional. A rule whose trigger is vamp and ebb will activate when this expression changes from false to true but will not do anything or even notice if vamp and ebb subsequently becomes false again. If the complementary behavior is desired, this intent must be specified in a second rule. Often the complementary rule is subtly different from the exact negation of the original; for instance, the trigger vamp and flood is not the same as not(vamp and ebb). In the case of the Montezuma Salinity Control Structure, the flood and ebb triggers are not even based on the same variable (the gate is opened based on a head difference, closed based on velocity).

The middle_vamp_ebb example combines vamp, which is the seasonal applicability of the rule with ebb, which is a tidal phenomenon. There are also meaningful operating rules that do not need a trigger at all. For instance, the user might want to operate SWP and CVP pumping based on a time series but bound it by some fraction of Sacramento inflow. The trigger in this case is \u201cTRUE\u201d and it will go off once at startup. This is the default in the GUI if you leave the trigger blank.

If what you really want is a trigger that continuously monitors a true-false condition and applies a value accordingly, you may want to consider using the IFELSE function and no trigger. For instance:

SET ext_flow(node=17) TO IFELSE( vamp, ts1, ts2)\n

will set the boundary flow at node 17 (San Joaquin River) to time series ts1 whenever vamp is true and to ts2 when vamp is not true.

"},{"location":"manual/reference/Operating_Rule_Guide/#misfires-and-redundant-triggering","title":"Misfires and Redundant Triggering","text":"

Extra triggering and rule activation may seem harmless when you consider one rule in isolation. Rerunning an action hurts performance, but the action is redundant rather than harmful. The real problem with rules that misfire is that they are active too often and tend to interfere with (\u201clock out\u201d or \u201cbump\u201d) other rules that are trying to manipulate the same model variable.

Here is an example of misfiring trigger based on an expression using date terms:

(YEAR >= 1990 AND MONTH>=APR AND DAY>=14)\n

(note: a much better way to write this expression using the DATE keyword is given in the reference section)

Because of the ANDs, this expression requires three conditions to be true at once in order to evaluate to TRUE. It goes off as intended on or about 14APR1990. But what happens on 01MAY1990? On 14MAY1990? This trigger is going to evaluate to FALSE and then back to TRUE. When it makes the FALSE-TRUE transition it will cause the trigger to go off, which is probably not what was intended.

There is a fix for the above expression (not the recommended one) that illustrates that the only things that matter are FALSE-TO-TRUE transitions. One more curious point about this example is that the correct behavior is obtained using:

(YEAR == 1990 AND MONTH == APR AND DAY >= 14)\n

Why? The rule will evaluate FALSE on or about 01MAY1990, but it will stay false!

These date examples are so common that there is a special way of dealing with them. See the function reference for DATE and SEASON.

"},{"location":"manual/reference/Operating_Rule_Guide/#default-true-trigger","title":"Default (TRUE) Trigger","text":"

If you leave the trigger definition blank in the GUI the trigger expression will be set to WHEN TRUE.

The TRUE trigger is roughly equivalent to \"at startup\" and you should be sure not to confuse it with \"always true\". Recall it is transitions that are important, and this trigger makes its only nominal FALSE-TO-TRUE transition once at the very beginning of the run. Once displaced by an overlapping action, the rule will never activate again.

A rule that evaluates to a trivial FALSE will never do anything.

As an example of a situation where these concepts matter, consider a rule that toggles use of a gate for the entire simulation. By default, a gate in the model is installed. Assume we have set up an expression named use_barriers or remove_barriers indicating whether we want to use gates. Three possibilities for writing the rule are:

   TRIGGER                ACTION \n1. TRUE                SET gate_install(gate=...) TO use_gate\n2. use_gate            SET gate_install(gate...) TO INSTALL\n3. remove_gate         SET gate_install(gate=...) TO REMOVE\n

Option 1 uses the default trigger. It will be activated at startup and the gate installation will be set to the expression variable use_gate. Option 2 is interesting because it will never do anything useful. It will be evaluated once at the start of the run, but it will never trigger if use_gate is FALSE. It will trigger if use_gate is TRUE, but this merely carries out the default. Option 3 remedies this by using remove_gate -- the non-default -- as the trigger. Different users seem to regard options (1) and (3) as more intuitive.

"},{"location":"manual/reference/Operating_Rule_Guide/#conflicts","title":"Conflicts","text":"

When a rule is triggered, it will be activated unless it conflicts with another, active rule. Rules conflict when they operate on the same model variable. For instance, two rules that act to change a weir coefficient in the same gate/weir conflict.

Two specifications govern conflicts:

1. When a rule conflicts with an active rule it is deferred. Deferred rules are not activated, but they are tricked into thinking they evaluated FALSE so that they can possibly make a FALSE-TRUE transition again the next time step.

2. When a rule conflicts with another potentially activating rule, the results are \u201cundefined\u201d. We are unaware of any universal solution in this situation. The best solution is to write rules that don\u2019t do this \u2013 we are currently working on a better warning system to detect when this happens.

"},{"location":"manual/reference/Operating_Rule_Guide/#dsm2-variable-and-function-reference","title":"DSM2 Variable and Function Reference:","text":""},{"location":"manual/reference/Operating_Rule_Guide/#variables","title":"Variables","text":"

The variables from DSM2 that can be used in operating rules include boundary and grid variables that can be changed and those that are merely observable (read-only). The observable variables are divided between variables that can be set to time series (Dynamic Variables) that will apply ever-after and variables that can only be set to new static values (Static Variables)

"},{"location":"manual/reference/Operating_Rule_Guide/#dynamic-control-variables","title":"Dynamic Control Variables","text":"

These variables are dynamically controllable and can be set to a time series. Once the new time series is set, the boundary or structure being controlled will have no memory of its old controlling time series. Most dynamic variables are gate and boundary data.

gate_op(gate=textname,device=textname, direction=[to_node|from_node|to_from_node])\n

Device operating coefficients (0..1) in corresponding direction. Use keywords CLOSE (=0) and OPEN (=1) to make rules more readable. The option \"to_from_node\" is write-only -- a convenience feature that writes to two otherwise separate variables.

gate_position(gate=textname,device=textname)\n

Physical operation of control structure such as radial gate height (physical units). The interpretation of \"position\" is dependent on the \"control_type\" of the gate. If it is gated from the bottom, position indicates elevation and is the same as elev. If the control type is gated from the top, as in a radial gate, the position is the height. This variable is deprecated now, in favor of directly using \"elev\" or \"height\".

gate_height(gate=textname,device=textname)\n

Height of gate device.

gate_elev(gate=textname,device=textname)\n

Crest elevation or invert elevation of gate device.

gate_width(gate=textname,device=textname)\n

Width or radius of gate device.

ext_flow(name=textname)\n

External flow (boundary flows, source/sink)

transfer_flow(transfer=textname)\n

Flows in object-to-object transfers

"},{"location":"manual/reference/Operating_Rule_Guide/#static-control-variables","title":"Static Control Variables","text":"

These are variables that are normally static. You can set them to a constant. If you set them to a time series, the model will not complain, but the result may not be what you expect. The model variable will only be set to the current value of the series at the time the rule was activated. The variable won't keep changing with the time series.

gate_install(gate=textname)\n

Determines or inquires whether the given gate is installed.

SET gate_install(...) TO [REMOVE|FALSE]\n

completely removes the gate and restores an equal-stage compatibility condition to the channel junction.

SET gate_install(...) TO [INSTALL|TRUE]\n

installs the gate.

gate_coef(gate=textname,device=textname,direction=[to_node|from_node])\n

Gate coefficient of the device in the given direction. This is a physical quantity of the structure, representing the roughness or efficiency of flow. It should not be used for operating controls such as flap gates. The coefficients will change only rarely when the actual site is altered and should never leave the range (0,1).

gate_nduplicate(gate=textname,device=textname)\n

Number of duplicate devices.

"},{"location":"manual/reference/Operating_Rule_Guide/#observable-variables","title":"Observable Variables","text":"

These are read-only model variables that cannot be manipulated directly, but can be observed and used in expressions for triggers and actions.

chan_flow(channel=number,dist=[number|length])\n

Flow in channel. dist=length indicates the end of the channel.

chan_vel(channel=number, dist=[number|length])\n

Velocity at given channel and distance.

chan_stage(channel=number,dist=[number|length])\n

Water Surface at given channel and distance.

chan_surf\n

Same as stage (water surface) in channel

res_stage(res=textname)\n

Water surface in reservoir

res_flow(res=textname, node=number)\n

Flow from reservoir to node

ts(name=textname)\n

Any time series named in the Operating Rule View of the GUI may be used by referencing the name. Time series evaluate to their value at the current time step.

"},{"location":"manual/reference/Operating_Rule_Guide/#model-time-queries","title":"Model Time Queries","text":"

The following commands retrieve model date or seasonal information:

YEAR, MONTH, DAY\n

Retrieves the year, month and day associated with the current model time step. These are returned as numbers. When testing them, you can (for clarity) use 3-letter abbreviations for the months. Examples:

YEAR >= 1991\n\nMONTH + 1 < MAY\n\nHOUR, MIN\n

Retrieve the (24 hour) hour and minute associated with the current model time step.

DATE\n

Returns a time stamp corresponding to the beginning of the day on the current model date. Example:

DATE >= 11OCT1992 (no time part)\n\nDT\n

Represents the model time step in seconds. This is often useful for use with ACCUMULATE

DATETIME\n

Returns a time stamp corresponding to the current model date and time. Example:

DATETIME > 04FEB1990 00:00 (date plus time)\n\nSEASON\n

Returns a time stamp relative to the beginning of the year corresponding to the beginning of the day on the current model date and time. Comparisons such as SEASON > 15APR AND SEASON \<01MAY avoid common logical mistakes from building this from scratch. There is one other gotcha with SEASON that comes up at the end of time periods because the timestamp is always at 00:00. Compare SEASON > 15APR AND SEASON \<01MAY with SEASON > 15APR AND SEASON \u226430APR and notice that the latter does not include the entire day 30APR.

Note that SEASON and DATE/DATETIME are preferable to combined expressions built from atomic expressions like day and month. They are clearer and avoid some curious gotchas. For instance DATE >= 14APR1990 will evaluate true only once per year, whereas (YEAR >= 1990 AND MONTH>=APR AND DAY>=14) will evaluate true on Apr 14, false on May 1 and true again on May 14. You could get the intended behavior with (YEAR == 1990 AND MONTH == APR AND DAY>=14), which will go from false to true only once, but the fix hardly seems worth the trouble.

"},{"location":"manual/reference/Operating_Rule_Guide/#numerical-operations","title":"Numerical Operations","text":"

The following operators and functions are available

+, -, *, /\n

Arithmetic operators with standard precedence of operations. You can use parenthesis to change the evaluation order.

x^3, x^y\n

Power of x and x to the power of y

MIN2(x,y)\n

Minimum of two arguments.

MAX2(x,y)\n

Maximum of two arguments.

MIN3(x,y,z)\n

Minimum of three arguments.

MAX3(x,y,z)\n

Maximum of three arguments.

SQRT(x)\n

Square root of x

EXP(x)\n

Exponent function (e to the power of x)

LN(x)\n

Natural log of x

LOG(x)\n

Base 10 log of x

"},{"location":"manual/reference/Operating_Rule_Guide/#logical-operations","title":"Logical Operations","text":"
x==y\n

Tests equality.

x<>y\n

Tests inequality.

x<y,x>y, x<=y, x>=y\n

Comparisons.

TRUE\n

The value TRUE

FALSE\n

The value FALSE

NOT expression\n

Negation of expression, as in NOT(x \\< y)

expr1 AND expr2\n

Logical \u2018and\u2019, which evaluates to TRUE only if both the expressions it joins are true. Expression (expr2) will not be evaluated if expr1 evaluates to FALSE.

expr1 OR expr2\n

Logical \u2018or\u2019

"},{"location":"manual/reference/Operating_Rule_Guide/#special-functions","title":"Special Functions","text":"
ACCUMULATE(expression, initval [,resetcond])\n

Cumulative value function. Accumulates additively the value of expression using initval (another numerical expression) as the initial condition and resetting the total anytime the resetcond evaluates to true. If you want to integrate you should multiply the expression by DT or else your rule won't be robust if someone changes the time step.

IFELSE(boolexpr, valexp1,valexpr2)\n

The ternary operator. If boolexpr returns true, returns the value given by valexpr1. If boolexpr returns false, returns the value given by valexpr2.

LOOKUP(expression, lookup_array,value_array)\n

Lookup values from a small user supplied table. The lookup array is provided using a bracketed, comma-separated list of values such as [1000.,2000.,3000.]. The value_array return values are similar but must have a length one smaller than the number of lookup values. The array values must be hard-wired numbers at the present time -- expressions are not allowed. The LOOKUP compares expression to elements of lookup_array. The highest element of the lookup table is currently a limit, not an actual lookup slot. The function returns the component of value_array corresponding to the highest index in lookup array that is \\<= expression, e.g.:

LOOKUP(1000.,[1000.,2000.,3000.], [1.,2.]) returns 1.\n\nLOOKUP(2000.,[1000.,2000.,3000.], [1.,2.]) returns 2.\n\nLOOKUP(3000.,[1000.,2000.,3000.], [1.,2.]) is an error.\n\nPID(expression,target,low,high,K, Ti,Td,Tt,b)\n

Use PID (Proportional, Integral, Derivative) control to try to guide expression towards target. The parameters are as follows. low: lower bound on control, representing the minimum value the control value can take (e.g. for gate height this might be zero).

high: upper bound on control.

K: The constant representing the Proportion component of the control. The constant multiplies (expression-target) to change a control value, so choose a factor that is reasonable that takes the scaling of the expression to the scaling of the control.

Ti: Integral time constant of control

Td: Derivative time constant of control.

Tt: Time basis of \"anti-windup\"

b: Set-point weighting (use 1.0 if you are new to PID).

"},{"location":"manual/reference/Output_Channel/","title":"Output Channel","text":""},{"location":"manual/reference/Output_Channel/#overview","title":"Overview:","text":"

The OUTPUT_CHANNEL table is used by both HYDRO and QUAL to specify output requests inside of the channel. Output is HEC-DSS or text format. The variables that can be requested vary by model.

"},{"location":"manual/reference/Output_Channel/#tables","title":"Tables:","text":"
  • OUTPUT_CHANNEL
  • OUTPUT_CHANNEL_SOURCE_TRACK
"},{"location":"manual/reference/Output_Channel/#output_channel","title":"OUTPUT_CHANNEL","text":"

The table specifies the name for output request, as well as the location, variable being output, time aggregation and destination file.

"},{"location":"manual/reference/Output_Channel/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Output_Channel/#name","title":"NAME","text":"

Name of the output request. This is part of the identifier of the table and will be used in the B_PART of the output if it is in DSS format. Generally, non-modelers will have an easier time understanding your output if this is a station name that is geographically fixed (e.g. \"vernalis\" or \"RSAC075\") than if it is a modeling construct (\"ch101\"). Similarly, avoid using VARIABLE inside this name -- this causes redundancy in the output DSS path and the layering won't work as well.

"},{"location":"manual/reference/Output_Channel/#chan_no","title":"CHAN_NO","text":"

Channel number in which output is requested.

"},{"location":"manual/reference/Output_Channel/#distance","title":"DISTANCE","text":"

Distance along channel (from upstream node to downstream), typically in feet. Results will be interpolated between Eulerian (HYDRO) or Lagrangian (QUAL) computational points.

"},{"location":"manual/reference/Output_Channel/#variable","title":"VARIABLE","text":"

Model variable to be output. In HYDRO, you can request\u00a0stage,flow,vel. In QUAL you can request\u00a0stage,flow\u00a0or the name of any constituent in the model. When no output request is made for a constituent that is not required for reaction kinetics, it is not calculated.

"},{"location":"manual/reference/Output_Channel/#interval","title":"INTERVAL","text":"

Time Interval of the output. Can be any DSS-compliant interval with a unit that is not calendar dependent (MIN, HOUR, DAY). This is a departure from previous versions of DSM2, which offered monthly output.

"},{"location":"manual/reference/Output_Channel/#period_op","title":"PERIOD_OP","text":"

Period aggregation performed to convert the model time step into the time interval of the output. May be INST or AVE, which produce instantaneous values or period averages, respectively.

"},{"location":"manual/reference/Output_Channel/#file","title":"FILE","text":"

Name of the output file where the data will be stored. If the extension .txt is given, the output is automatically in text format. If a .dss extension is used, output is in HEC-DSS format.

"},{"location":"manual/reference/Output_Channel/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Output_Channel/#identifier","title":"Identifier:","text":"

NAME, VARIABLE

"},{"location":"manual/reference/Output_Channel/#parent-table","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/Output_Channel/#include-block","title":"Include Block:","text":"

OUTPUT_TIME_SERIES

"},{"location":"manual/reference/Output_Channel/#output_channel_source_track","title":"OUTPUT_CHANNEL_SOURCE_TRACK","text":"

This table is identical to OUTPUT_CHANNEL except it is only used in QUAL and it contains one additional field for tracking constituent sources.

"},{"location":"manual/reference/Output_Channel/#field-descriptions_1","title":"Field Descriptions","text":""},{"location":"manual/reference/Output_Channel/#name_1","title":"NAME","text":"

Name of the output request. See comments above, and note that in this case you should also avoid using the SOURCE_NAME in the output name.

"},{"location":"manual/reference/Output_Channel/#chan_no_1","title":"CHAN_NO","text":"

Channel number in which output is requested. DISTANCE: Distance along channel (from upstream node to downstream), typically in feet. Results will be interpolated between Eulerian (HYDRO) or Lagrangian (QUAL) computational points.

"},{"location":"manual/reference/Output_Channel/#variable_1","title":"VARIABLE","text":"

Model variable to be output. In HYDRO, you can request\u00a0stage,flow,vel. In QUAL you can request\u00a0stage,flow\u00a0or the name of any constituent in the model. When no output request is made for a constituent that is not required for reaction kinetics, it is not calculated.

"},{"location":"manual/reference/Output_Channel/#source_group","title":"SOURCE_GROUP","text":"

Name of the source group that is being tracked in this output request. To learn how to define a group, see\u00a0group\u00a0reference. The group used must consist entirely of boundary or source locations -- not water bodies.

"},{"location":"manual/reference/Output_Channel/#interval_1","title":"INTERVAL","text":"

Time Interval of the output. Can be any DSS-compliant interval with a unit that is not calendar dependent (MIN, HOUR, DAY). This is a departure from previous versions of DSM2, which offered monthly output.

"},{"location":"manual/reference/Output_Channel/#period_op_1","title":"PERIOD_OP","text":"

Period aggregation performed to convert the model time step into the time interval of the output. May be INST or AVE, which produce instantaneous values or period averages, respectively.

"},{"location":"manual/reference/Output_Channel/#file_1","title":"FILE","text":"

Name of the output file where the data will be stored. If the extension .txt is given, the output is automatically in text format. If a .dss extension is used, output is in HEC-DSS format.

"},{"location":"manual/reference/Output_Channel/#table-info_1","title":"Table Info","text":""},{"location":"manual/reference/Output_Channel/#identifier_1","title":"Identifier:","text":"

NAME, VARIABLE, SOURCE_GROUP

"},{"location":"manual/reference/Output_Channel/#parent-table_1","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/Output_Channel/#include-block_1","title":"Include Block:","text":"

OUTPUT_TIME_SERIES

  • Finer output is preferred to daily. You can easily average to daily later outside the model using a script or time series application. Tidal data are poorly represented by daily aggregations, and numerous incorrect conclusions have arisen from aliasing (fluctuations over two weeks) when a 24 hour daily averaging operation is imposed on a naturally 25-hour phenomenon. Monthly output is no longer allowed.
"},{"location":"manual/reference/Output_Gate/","title":"Output Gate","text":""},{"location":"manual/reference/Output_Gate/#overview","title":"Overview:","text":"

The OUTPUT_GATE table is used by HYDRO to specify output requests at a gate. Output is HEC-DSS or text format.

"},{"location":"manual/reference/Output_Gate/#tables","title":"Tables:","text":"
  • OUTPUT_GATE
"},{"location":"manual/reference/Output_Gate/#output_gate","title":"OUTPUT_GATE","text":"

The table specifies the name of the output request, as well as the location, variable being output, time aggregation and destination file.

"},{"location":"manual/reference/Output_Gate/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Output_Gate/#name","title":"NAME","text":"

Name of the output request. This is the identifier of the table and will be used in the B_PART of the output if it is in DSS format. It can be the same as the gate name but it doesn't have to be. Avoid using VARIABLE inside this name -- this causes redundancy in the output and the layering won't work correctly.

"},{"location":"manual/reference/Output_Gate/#gate_name","title":"GATE_NAME","text":"

Name of the gate at which output is requested.

"},{"location":"manual/reference/Output_Gate/#device","title":"DEVICE","text":"

Name of the gate device, if applicable. You can request operational or physical data from a device as well as flow. You can also request some gate output (install,flow) that is not linked to a particular device. In this case, the field should be set to\u00a0none

"},{"location":"manual/reference/Output_Gate/#variable","title":"VARIABLE","text":"

Model variable to be output. From a device you can request some physical data (width,\u00a0height,\u00a0elev), operational data (op_to_node,\u00a0op_from_node,\u00a0position\u00a0) or\u00a0flow\u00a0oriented from water body to node. From a gate with device=none you can request the variables\u00a0install, or total\u00a0flow\u00a0oriented from water body to node

"},{"location":"manual/reference/Output_Gate/#interval","title":"INTERVAL","text":"

Time Interval of the output. Can be any DSS-compliant interval with a unit that is not calendar dependent (MIN, HOUR, DAY). This is a departure from previous versions of DSM2, which offered monthly output.

"},{"location":"manual/reference/Output_Gate/#period_op","title":"PERIOD_OP","text":"

Period aggregation performed to convert the model time step into the time interval of the output. May be INST or AVE, but AVE can be meaningless for a lot of gate variables.

"},{"location":"manual/reference/Output_Gate/#file","title":"FILE","text":"

Name of the output file where the data will be stored. If the extension .txt is given, the output is automatically in text format. If a .dss extension is used, output is in HEC-DSS format.

"},{"location":"manual/reference/Output_Gate/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Output_Gate/#identifier","title":"Identifier:","text":"

NAME, VARIABLE

"},{"location":"manual/reference/Output_Gate/#parent-table","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/Output_Gate/#include-block","title":"Include Block:","text":"

OUTPUT_TIME_SERIES

"},{"location":"manual/reference/Output_Gate/#examples","title":"Examples:","text":"

output_gate_example.inp\u00a0

  • Initially, the thing that is hard to get about gate output is the flow orientation. The output for the gate is oriented with the gate, which may or may not be in the upstream-downstream direction
"},{"location":"manual/reference/Output_Reservoir/","title":"Output Reservoir","text":""},{"location":"manual/reference/Output_Reservoir/#overview","title":"Overview:","text":"

The OUTPUT_RESERVOIR table is used by both HYDRO and QUAL to specify output requests inside of a reservoir. Output is HEC-DSS or text format. The variables that can be requested vary by model.

"},{"location":"manual/reference/Output_Reservoir/#tables","title":"Tables:","text":"
  • OUTPUT_RESERVOIR
  • OUTPUT_RESERVOIR_SOURCE_TRACK
"},{"location":"manual/reference/Output_Reservoir/#output_reservoir","title":"OUTPUT_RESERVOIR","text":"

The table specifies the name of the output request, as well as the location, variable being output, time aggregation and destination file.

"},{"location":"manual/reference/Output_Reservoir/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Output_Reservoir/#name","title":"NAME","text":"

Name of the output request. This is part of the identifier of the table and will be used in the B_PART of the output if it is in DSS format. Avoid using VARIABLE inside this name -- this causes redundancy in the output DSS path and the layering won't work as well.

"},{"location":"manual/reference/Output_Reservoir/#res_name","title":"RES_NAME","text":"

NAME of reservoir in which output is requested.

"},{"location":"manual/reference/Output_Reservoir/#node","title":"NODE","text":"

Node number, if the request is for a flow to a particular connected node.

"},{"location":"manual/reference/Output_Reservoir/#variable","title":"VARIABLE","text":"

Model variable to be output. In HYDRO, you can request\u00a0stage,flow,vel. In QUAL you can request\u00a0stage,flow\u00a0or the name of any constituent in the model. A constituent that is not required for reaction kinetics and has no output request is not calculated.

"},{"location":"manual/reference/Output_Reservoir/#interval","title":"INTERVAL","text":"

Time Interval of the output. Can be any DSS-compliant interval with a unit that is not calendar dependent (MIN, HOUR, DAY). This is a departure from previous versions of DSM2, which offered monthly output.

"},{"location":"manual/reference/Output_Reservoir/#period_op","title":"PERIOD_OP","text":"

Period aggregation performed to convert the model time step into the time interval of the output. May be INST or AVE, which produce instantaneous values or period averages, respectively.

"},{"location":"manual/reference/Output_Reservoir/#file","title":"FILE","text":"

Name of the output file where the data will be stored. If the extension .txt is given, the output is automatically in text format. If a .dss extension is used, output is in HEC-DSS format.

"},{"location":"manual/reference/Output_Reservoir/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Output_Reservoir/#identifier","title":"Identifier:","text":"

NAME, VARIABLE

"},{"location":"manual/reference/Output_Reservoir/#parent-table","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/Output_Reservoir/#include-block","title":"Include Block:","text":"

OUTPUT_TIME_SERIES

"},{"location":"manual/reference/Output_Reservoir/#output_reservoir_source_track","title":"OUTPUT_RESERVOIR_SOURCE_TRACK","text":"

This table is identical to OUTPUT_RESERVOIR except it is only used in QUAL and it contains one additional field for tracking constituent sources.

"},{"location":"manual/reference/Output_Reservoir/#field-descriptions_1","title":"Field Descriptions","text":""},{"location":"manual/reference/Output_Reservoir/#name_1","title":"NAME","text":"

Name of the output request. See comments above, and note that in this case you should also avoid using the SOURCE_NAME in the output name.

"},{"location":"manual/reference/Output_Reservoir/#res_name_1","title":"RES_NAME","text":"

Name of reservoir in which output is requested.

"},{"location":"manual/reference/Output_Reservoir/#node_1","title":"NODE","text":"

Node number, if the request is for a flow to a particular connected node. Otherwise, use\u00a0none

"},{"location":"manual/reference/Output_Reservoir/#variable_1","title":"VARIABLE","text":"

Model variable to be output. In HYDRO, you can request\u00a0stage,flow,vel. In QUAL you can request\u00a0stage,flow\u00a0or the name of any constituent in the model. A constituent that is not required for reaction kinetics and has no output request is not calculated.

"},{"location":"manual/reference/Output_Reservoir/#source_group","title":"SOURCE_GROUP","text":"

Name of the source group that is being tracked in this output request. To learn how to define a group, see\u00a0group\u00a0reference. The group used must consist entirely of boundary or source locations -- not water bodies.

"},{"location":"manual/reference/Output_Reservoir/#interval_1","title":"INTERVAL","text":"

Time Interval of the output. Can be any DSS-compliant interval with a unit that is not calendar dependent (MIN, HOUR, DAY). This is a departure from previous versions of DSM2, which offered monthly output.

"},{"location":"manual/reference/Output_Reservoir/#period_op_1","title":"PERIOD_OP","text":"

Period aggregation performed to convert the model time step into the time interval of the output. May be INST or AVE, which produce instantaneous values or period averages, respectively.

"},{"location":"manual/reference/Output_Reservoir/#file_1","title":"FILE","text":"

Name of the output file where the data will be stored. If the extension .txt is given, the output is automatically in text format. If a .dss extension is used, output is in HEC-DSS format.

"},{"location":"manual/reference/Output_Reservoir/#table-info_1","title":"Table Info","text":""},{"location":"manual/reference/Output_Reservoir/#identifier_1","title":"Identifier:","text":"

NAME, VARIABLE, SOURCE_GROUP

"},{"location":"manual/reference/Output_Reservoir/#parent-table_1","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/Output_Reservoir/#include-block_1","title":"Include Block:","text":"

OUTPUT_TIME_SERIES

  • Finer output is preferred to daily. You can easily average to daily later outside the model using a script or time series application. Tidal data are poorly represented by daily aggregations, and numerous incorrect conclusions have arisen from aliasing (fluctuations over two weeks) when a 24 hour daily averaging operation is imposed on a naturally 25-hour phenomenon. Monthly output is no longer allowed.
"},{"location":"manual/reference/PTM_Output_Files/","title":"PTM Output Files","text":"

PTM outputs a trace.out and animation.bin file in addition to the .dss files.\u00a0

The animation binary file outputs in Java binary format the snapshot location of all particles in the simulation.\u00a0

The trace output file only records the event (timestamp) when each particle passes from one waterbody to another waterbody.

All indices are internal global index of grid. All times are in Julian time.

Table. Content in Trace.out

1st col

2nd col

3rd col

4th col

header row

start time

end time

time step

total particle number

content row

event time

particle id

node id particle passing

waterbody particle entering

trace.out is written by ParticleObserver, which is incorporated in each particle, then read by flux class.

Time is in Julian minute.

"},{"location":"manual/reference/Particle_Filter/","title":"Particle Filter","text":""},{"location":"manual/reference/Particle_Filter/#overview","title":"Overview","text":"

Particle Filter is a section in the PTM input where you set up particle-filters. Particle Filter is designed to modify the particle flux at node, without changing hydrodynamic condition, by keeping particles from entering the specified waterbody.

"},{"location":"manual/reference/Particle_Filter/#tables","title":"Tables","text":"

Example

PARTICLE_FILTER \nNAME       NODE WATERBODY FILLIN FILE                 PATH \nfilter_hor 8    chan:54   last   constant             0 \nfilter_nf  280  chan:357  last   ./Filter_OP_NF.dss   /HIST+FILTER/FILTER_NF/FILTER_OP//IR-DECADE/DWR-BDO/  \nEND\n

This is for a normal filter, which locates at a node connecting to any waterbody. The PARTICLE_FILTER table defines particle filters by giving them names, associating them to a node and a waterbody, and setting up the passing efficiency (which could be constant value, or time-varying data in DSS).

"},{"location":"manual/reference/Particle_Filter/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Particle_Filter/#name","title":"NAME","text":"

Name assigned to the particle filter. This is the identifier of the filter used elsewhere to refer to the filter.

"},{"location":"manual/reference/Particle_Filter/#node","title":"NODE","text":"

The ID of the node to which the filter is attached.

"},{"location":"manual/reference/Particle_Filter/#at_wb","title":"AT_WB","text":"

The type and ID of the waterbody to which the filter is attached.

"},{"location":"manual/reference/Particle_Filter/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model. See\u00a0fillin types.

"},{"location":"manual/reference/Particle_Filter/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word constant if you would like to assign a constant value to the input (the value will be entered in the next column).**

"},{"location":"manual/reference/Particle_Filter/#path","title":"PATH","text":"

The path within the text or DSS file of the time series data. If you used the constant keyword in the Input File column, enter the value here. The stored variable is particle passing efficiency, a float value between 0 ~ 1: 0-block; 1-totally pass.

Filter locates on 1 node, at the side of 1 waterbody (channel, reservoir, source flow). Every filter is unique with 1 node & 1 waterbody.

Filter is 2-directional, and is designed to function differently for each direction \\<1> waterbody->node: filter serves as a totally block with passing efficiency 0; \\<2> node->waterbody: filter is designed for changing particle decision-making, with passing efficiency as a re-adjusting factor; thus it only functions when node is connected with multiple waterbodies; i.e. it will not have any effect for a single chain of channel

Currently there's no delay concept for filter, because the filter delay rule is unknown, and our major concern is particles' fate (particle's movement only depends on hydro conditions, not interacted with each other)

"},{"location":"manual/reference/Particle_Flux_Output/","title":"Particle Flux Output","text":""},{"location":"manual/reference/Particle_Flux_Output/#overview","title":"Overview:","text":"

PARTICLE_FLUX_OUTPUT is a section in the PTM text input that specifies how the PTM records the number of particles in a group of water bodies into a DSS output.

Example

FLUX_OUTPUT\nNAME         FROM_WB           TO_WB             INTERVAL FILENAME\nTWITCHELL    res:clifton_court group:swp         15MIN    ${PTMOUTPUTFILE}\nEMMATON      chan:216          group:cvp         15MIN    ${PTMOUTPUTFILE}\nDIVERSION_AG group:all         group:ag_div      15MIN    ${PTMOUTPUTFILE}\nEND\n
"},{"location":"manual/reference/Particle_Flux_Output/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Particle_Flux_Output/#name","title":"NAME","text":"

This is the name that will go in the B_PART of the output.

"},{"location":"manual/reference/Particle_Flux_Output/#from_wb","title":"FROM_WB","text":"

Name of the water body or group that is the \"from\" location of the flux.

"},{"location":"manual/reference/Particle_Flux_Output/#to_wb","title":"TO_WB","text":"

Name of the water body or group that is the \"to\" destination in the flux.

"},{"location":"manual/reference/Particle_Flux_Output/#interval","title":"INTERVAL","text":"

Interval at which to record the particle flux.

"},{"location":"manual/reference/Particle_Flux_Output/#filename","title":"FILENAME","text":"

The name of the output file. If the file extension is *.dss, output is in DSS format. If the file extension is *.txt a text file output is produced.

"},{"location":"manual/reference/Particle_Flux_Output/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Particle_Flux_Output/#identifier","title":"Identifier:","text":"

NAME

Particle flux output can be in absolute number of particles or percentage of injection. The option is set by PTM_FLUX_PERCENT in the SCALAR section.

"},{"location":"manual/reference/Particle_Group_Output/","title":"Particle Group Output","text":""},{"location":"manual/reference/Particle_Group_Output/#overview","title":"Overview:","text":"

PARTICLE_GROUP_OUTPUT is a section in the PTM input that specifies DSS output to record residence of particles in group of water bodies.

Example

PARTICLE_GROUP_OUTPUT \nNAME       GROUP_NAME INTERVAL   FILENAME \nTWITCHELL  twitchell  1HOUR      ${PTMOUTPUTFILE} \nEMMATON    emmaton    1HOUR      ${PTMOUTPUTFILE} \nEND\n

Field Descriptions

"},{"location":"manual/reference/Particle_Group_Output/#name","title":"NAME","text":"

This is the output name that will go in the B_PART of the output.

"},{"location":"manual/reference/Particle_Group_Output/#group_name","title":"GROUP_NAME","text":"

Name of the group defined in GROUP.

"},{"location":"manual/reference/Particle_Group_Output/#interval","title":"INTERVAL","text":"

Interval at which to record residence.

"},{"location":"manual/reference/Particle_Group_Output/#filename","title":"FILENAME","text":"

The name of the output file. If the file extension is *.dss, output is in DSS format. If the file extension is *.txt a text file output is produced.

"},{"location":"manual/reference/Particle_Group_Output/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Particle_Group_Output/#identifier","title":"Identifier:","text":"

NAME

Similar as particle flux output.

"},{"location":"manual/reference/Particle_Insertion/","title":"Particle Insertion","text":""},{"location":"manual/reference/Particle_Insertion/#overview","title":"Overview","text":"

Particle Insertion is a section in the PTM input that specifies the insertion of particles in water bodies over time. The PTM can insert multiple sets of particles.

"},{"location":"manual/reference/Particle_Insertion/#tables","title":"Tables","text":"

Example

PARTICLE_INSERTION \nNODE  NPARTS   DELAY  DURATION     \n1     1000     0hour  1day     \n13    1000     1day   0hour     \nEND\n

The Rate Coefficient Table lists reaction rate coefficients for non-conservative constituents. Different rates can be assigned to different water bodies. The assignment is done using groups -- first you define a group and then you assign rate coefficients to the group.

"},{"location":"manual/reference/Particle_Insertion/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Particle_Insertion/#node","title":"NODE","text":"

The node at which the insertion is made.

"},{"location":"manual/reference/Particle_Insertion/#nparts","title":"NPARTS","text":"

Number of particles.

"},{"location":"manual/reference/Particle_Insertion/#delay","title":"DELAY","text":"

Delay before the first insertion after the beginning of the PTM run. The unit of time needs to be attached without spaces.

"},{"location":"manual/reference/Particle_Insertion/#duration","title":"DURATION","text":"

Interval over which insertion is evenly distributed in time. If the time is set as zero, all the particles are inserted instantaneously. The unit of time needs to be attached without spaces.

"},{"location":"manual/reference/Particle_Reservoir_Filter/","title":"Particle Reservoir Filter","text":""},{"location":"manual/reference/Particle_Reservoir_Filter/#overview","title":"Overview","text":"

Particle Reservoir Filter is a section in the PTM input where you set up particle-filters. Particle Reservoir Filter is designed to modify the particle flux at reservoir, without changing hydrodynamic condition, by keeping particles from entering the specified waterbody.

"},{"location":"manual/reference/Particle_Reservoir_Filter/#tables","title":"Tables","text":"

Example

PARTICLE_RES_FILTER \nNAME          RES_NAME      WATERBODY          FILLIN FILE           PATH \nclfc_div_bbid clifton_court qext:dicu_div_bbid last   ./filterOp.dss /HIST+FILTER/CLFC_DIV/FILTER_OP//IR-DECADE/DWR-BDO/  \nEND\n

This is a special filter, which locates at a reservoir directly connecting to a source flow. The PARTICLE_RES_FILTER table defines particle filters by giving them names, associating them to a reservoir and one of its directly connecting waterbody, and setting up the passing efficiency. (which could be constant value, or time-varying data in DSS).

"},{"location":"manual/reference/Particle_Reservoir_Filter/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Particle_Reservoir_Filter/#name","title":"NAME","text":"

Name assigned to the particle filter. This is the identifier of the filter used elsewhere to refer to the filter.

"},{"location":"manual/reference/Particle_Reservoir_Filter/#res_name","title":"RES_NAME","text":"

The name of the reservoir to which the filter is applied.

"},{"location":"manual/reference/Particle_Reservoir_Filter/#at_wb","title":"AT_WB","text":"

The type and ID of the waterbody to which the filter is attached.

"},{"location":"manual/reference/Particle_Reservoir_Filter/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model. See\u00a0fillin types.

"},{"location":"manual/reference/Particle_Reservoir_Filter/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word constant if you would like to assign a constant value to the input (the value will be entered in the next column).**

"},{"location":"manual/reference/Particle_Reservoir_Filter/#path","title":"PATH","text":"

The path within the text or DSS file of the time series data. If you used the constant keyword in the Input File column, enter the value here. The stored variable is particle passing efficiency, a float value between 0 ~ 1: 0-block; 1-totally pass. **

Similar as Particle Filter

"},{"location":"manual/reference/Rate_Coefficients/","title":"Rate Coefficients","text":""},{"location":"manual/reference/Rate_Coefficients/#overview","title":"Overview","text":"

Rate Coefficients are reaction and growth rates assigned to non-conservative constituents. This table assigns the rates to groups of water bodies (there usually aren't enough data to support individual assignments).

"},{"location":"manual/reference/Rate_Coefficients/#tables","title":"Tables","text":"

Example

# sample algae rate coefficients in a channel group\nRATE_COEFFICIENT\nGROUP_NAME  CONSTITUENT  VARIABLE  VALUE \nchan_10_15\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 algae\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 alg_die\u00a0\u00a0\u00a0 0.2 \nchan_10_15\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 algae\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 alg_grow\u00a0\u00a0 1.5 \nchan_10_15\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 algae\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 alg_resp\u00a0 0.15 \nchan_10_15\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 algae\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 settle\u00a0\u00a0\u00a0\u00a0 0.2 \nEND\n

The Rate Coefficient Table lists reaction rate coefficients for non-conservative constituents. Different rates can be assigned to different water bodies. The assignment is done using groups -- first you define a group and then you assign rate coefficients to the group.

"},{"location":"manual/reference/Rate_Coefficients/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Rate_Coefficients/#group_name","title":"GROUP_NAME","text":"

Name of the group to which the coefficient entry is assigned.

"},{"location":"manual/reference/Rate_Coefficients/#constituent","title":"CONSTITUENT","text":"

Non-conservative constituent with which coefficient is associated.

"},{"location":"manual/reference/Rate_Coefficients/#variable","title":"VARIABLE","text":"

Physical process governed by coefficient.

"},{"location":"manual/reference/Rate_Coefficients/#value","title":"VALUE","text":"

Value assigned to the coefficient

"},{"location":"manual/reference/Rate_Coefficients/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Rate_Coefficients/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Rate_Coefficients/#include-block","title":"Include Block:","text":"

QUAL_SPATIAL

Assignments on higher layers supersede assignments on lower layers, even if the patterns that cause the assignment are not the same.

All channels must have rate coefficients for non-conservative DO runs.

"},{"location":"manual/reference/Reservoir/","title":"Reservoir","text":""},{"location":"manual/reference/Reservoir/#overview","title":"Overview","text":"

Reservoirs are open bodies of water that store flow and are connected to nodes by means of an energy-based equation. Reservoirs are considered instantly well-mixed.

  • The Reservoirs Table specifies the identity and physical properties of the reservoir.
  • Connections to nodes are specified in the Reservoir Connections table.\u00a0
  • Reservoir area as a function of elevation is specified in Reservoir volume table, while volume is calculated in code (since 8.2)
"},{"location":"manual/reference/Reservoir/#reservoir-table","title":"RESERVOIR Table","text":"

A sample is given below

Example

# Description:\n# Setting of Clifton Court Forebay\nRESERVOIR\nNAME  AREA  BOT_ELEV   \nclifton_court       91.868000   -7.748      \nEND\n

The RESERVOIR table defines the name and physical properties of the reservoir.\u00a0In the case of a \"tank\"-like reservoir, the area is simply the constant area, and the volume is the water depth (surface elevation minus bottom elevation) times the constant area.

"},{"location":"manual/reference/Reservoir/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Reservoir/#name","title":"NAME","text":"

Name of the reservoir. This is the identifier of the reservoir used in other tables.

"},{"location":"manual/reference/Reservoir/#area","title":"AREA","text":"

Surface area (in units of million sq ft) of the reservoir at typical depth. This area is used to calculate volume changes.

"},{"location":"manual/reference/Reservoir/#bot_elev","title":"BOT_ELEV","text":"

Elevation (ft) of the bottom of the reservoir.

"},{"location":"manual/reference/Reservoir/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Reservoir/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Reservoir/#include-block","title":"Include Block:","text":"

GRID

"},{"location":"manual/reference/Reservoir/#reservoir_connection-table","title":"RESERVOIR_CONNECTION Table","text":"

See sample below

Example

# Description:\n# Setting of Frank Tract Connections\nRESERVOIR_CONNECTION\n\nRES_NAME  NODE  COEF_IN  COEF_OUT   \nfranks_tract        103   2250.000  2250.000     \nfranks_tract        216   1500.000  1500.000    \nEND\n

The RESERVOIR_CONNECTION table lists reservoir connections to neighboring nodes. Flow through reservoir connections is calculated using the following formula

Q = Cto sqrt[ 2g(znode - zres) ] ... zres \\< znode

Q = Cfrom sqrt[ 2g(zres - znode) ] ... zres > znode

Where:

  • Cto and Cfrom are coefficients representing the hydraulic efficiency of the reservoir connection and the nominal Area perpendicular to flow.
  • g is gravity and
  • zres and znode are the water surface elevations at the reservoir and node (node surface is assessed by means of a reference channel that has no reservoirs attached to it).
"},{"location":"manual/reference/Reservoir/#field-descriptions_1","title":"Field Descriptions","text":""},{"location":"manual/reference/Reservoir/#res_name","title":"RES_NAME","text":"

Name of reservoir at which connection is specified.

"},{"location":"manual/reference/Reservoir/#node","title":"NODE","text":"

Number identifying the node at which connection is specified.

"},{"location":"manual/reference/Reservoir/#coef_in","title":"COEF_IN","text":"

Coefficient from node to reservoir, greater than zero. If you compare the reservoir equation to the gate or other orifice equation you will find that the reservoir coefficient actually folds several quantities into one parameter: a flow efficiency (between zero and one) and a area of flow. If you have an observation of the area normal to flow, the coefficient should be some fraction of this aperture.

"},{"location":"manual/reference/Reservoir/#coef_out","title":"COEF_OUT","text":"

Coefficient from reservoir to node (the flow direction out of the reservoir), greater than zero.

"},{"location":"manual/reference/Reservoir/#table-info_1","title":"Table Info","text":""},{"location":"manual/reference/Reservoir/#identifier_1","title":"Identifier:","text":"

RES_NAME, NODE

"},{"location":"manual/reference/Reservoir/#parent-table","title":"Parent Table:","text":"

RESERVOIR

"},{"location":"manual/reference/Reservoir/#parent-identifier","title":"Parent Identifier:","text":"

RES_NAME

"},{"location":"manual/reference/Reservoir/#include-block_1","title":"Include Block:","text":"

GRID

A node may not have more than three reservoir connections and must have at least one ungated channel connection.

"},{"location":"manual/reference/Reservoir/#reservoir_vol-table","title":"RESERVOIR_VOL Table","text":"

See sample below

RESERVOIR_VOL\nRES_NAME                 ELEV          AREA\nliberty                -61.975          0.000\nliberty                -32.808          2.478\nliberty                -16.404         16.220\nliberty                 -3.281        272.328\nliberty                 -1.640       1017.270\nliberty                  0.000       1999.522\nliberty                  1.640       3031.999\nliberty                  3.281       4209.851\nliberty                  4.921       4584.028\nliberty                  6.562       5190.456\nliberty                  8.202       6359.679\nliberty                  9.843       6636.050\nliberty                 13.123       6731.118\nliberty                 16.404       6830.894\nliberty                 19.685       6876.916\nliberty                 22.966       6890.138\nEND\n

Since version 8.2, reservoirs can also have variable area and volume defined as a function of elevation. This table still requires the reservoir to be defined in the\u00a0RESERVOIR table even though the elevation area specified in the RESERVOIR table will be ignored if it is specified here.

"},{"location":"manual/reference/Reservoir/#field-descriptions_2","title":"Field Descriptions","text":""},{"location":"manual/reference/Reservoir/#res_name_1","title":"RES_NAME","text":"

Name of the reservoir. This is the identifier of the reservoir and it should have been specified in the RESERVOIR table (elevation and area from that table are ignored).

"},{"location":"manual/reference/Reservoir/#elev","title":"ELEV","text":"

Elevation (ft) of the reservoir at which the area and volume are specified. This elevation is to the datum of the rest of model (currently NAVD88)

"},{"location":"manual/reference/Reservoir/#area_1","title":"AREA","text":"

Surface area (in acres) of the reservoir at specified elevation. The area is interpolated between elevations based on the current elevation of the water level

(Reference: Annual Report 2015, Chapter 2). See Figure below for a snippet

"},{"location":"manual/reference/Reservoir/#volume","title":"VOLUME","text":"

Volume (in units of acre-ft) of the reservoir at specified elevation. The volume is calculated as explained here.

"},{"location":"manual/reference/Reservoir/#table-info_2","title":"Table Info","text":""},{"location":"manual/reference/Reservoir/#identifier_2","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Reservoir/#include-block_2","title":"Include Block:","text":"

GRID

"},{"location":"manual/reference/Reservoir_Concentration/","title":"Reservoir Concentration","text":""},{"location":"manual/reference/Reservoir_Concentration/#overview","title":"Overview","text":"

Reservoir concentration represents the concentration of constituents attached to reservoir sources.

"},{"location":"manual/reference/Reservoir_Concentration/#tables","title":"Tables","text":"

Example

# Description:\n# BBID EC concentration in Clifton Court Forebay\nRESERVOIR_CONCENTRATION\nNAME            RES_NAME      VARIABLE FILLIN FILE                                          PATH   \ndicu_drain_bbid clifton_court ec       last   ../../timeseries/dicuwq_3vals_extended.dss    /DICU-HIST+RSVR/BBID/DRAIN-EC//1MON/DWR-BDO/           \nEND\n

The RESERVOIR_CONCENTRATION table attaches concentrations to boundary and source flows defined in QUAL. The table also assigns a time series to the source.

"},{"location":"manual/reference/Reservoir_Concentration/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Reservoir_Concentration/#name","title":"NAME","text":"

Name assigned to the source. An entry here must have the same name as an entry in the BOUNDARY_STAGE, BOUNDARY_FLOW or SOURCE_FLOW tables -- by matching names you will attach concentrations to the flow.

"},{"location":"manual/reference/Reservoir_Concentration/#node_no","title":"NODE_NO","text":"

Node number where the flow is applied. This must match the node number given in the original flow table (it is a bit redundant, but easier to look things up).

"},{"location":"manual/reference/Reservoir_Concentration/#variable","title":"VARIABLE","text":"

Constituent name. If no output is requested for the constituent, it is currently ignored.

"},{"location":"manual/reference/Reservoir_Concentration/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model.

"},{"location":"manual/reference/Reservoir_Concentration/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word constant if you would like to assign a constant value to the input (the value will be entered in the next column).

"},{"location":"manual/reference/Reservoir_Concentration/#path","title":"PATH","text":"

The path within the text or DSS file of the time series data. If you used the constant keyword in the Input File column, enter the value (e.g. 4.22) here.

"},{"location":"manual/reference/Reservoir_Concentration/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Reservoir_Concentration/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Reservoir_Concentration/#include-block","title":"Include Block:","text":"

QUAL_TIME_SERIES

Multiple sources and sinks can be assigned to a reservoir. They are often kept separate in order to assign different concentrations to them.

"},{"location":"manual/reference/Reservoir_Initial_Condition/","title":"Reservoir Initial Condition","text":""},{"location":"manual/reference/Reservoir_Initial_Condition/#overview","title":"Overview","text":"

HYDRO requires a water surface initial condition at reservoirs. The Reservoir IC view allows the user to specify default initial conditions. The default initial condition is required, but will be overridden if a restart file is used.

"},{"location":"manual/reference/Reservoir_Initial_Condition/#tables","title":"Tables","text":"

Example

# Description:\n# Initial Condition of Clifton Court Forebay\nRESERVOIR_IC \nRES_NAME          STAGE   \nclifton_court     5.000          \nEND\n

The RESERVOIR_IC table specifies the default initial water surface elevation at each reservoir. This value is used at the start of the run unless it is overridden by a restart file.

"},{"location":"manual/reference/Reservoir_Initial_Condition/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Reservoir_Initial_Condition/#res_name","title":"RES_NAME","text":"

Name of reservoir where initial condition is to be applied.

"},{"location":"manual/reference/Reservoir_Initial_Condition/#stage","title":"STAGE","text":"

Initial water surface elevation.

"},{"location":"manual/reference/Reservoir_Initial_Condition/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Reservoir_Initial_Condition/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Reservoir_Initial_Condition/#include-block","title":"Include Block:","text":"

INITIAL_CONDITION

Default initial values are replaced if a restart file is used.

Currently, QUAL cannot take a spatially distributed default initial condition for constituent concentrations. It has only a single scalar.

Consistency should be maintained between initial reservoir-channel stage differences and flows at nodes. If the reservoir is assigned a different initial stage than surrounding channels, the head difference implies a flow described by the reservoir equations (see Reservoir View). Unless you are very careful to balance the implied flow with other channel flows, the initial time step will have a mass imbalance. If what you want is an easy initial condition, try making stage in the reservoir equal to the stage in all the surrounding channels and making the initial flow zero. Alternatively, you can put up with the imbalance -- just run HYDRO for an hour or so extra before the start of any QUAL run you want to do.

"},{"location":"manual/reference/Scalar/","title":"Scalar","text":""},{"location":"manual/reference/Scalar/#overview","title":"Overview","text":"

Scalars are scalar model variables used to specify model-wide numerical properties and echoed output levels. They are the equivalent of the text input SCALAR section. All of the parameters are interpreted as text and can be replaced by ENVVARS.

"},{"location":"manual/reference/Scalar/#tables","title":"Tables","text":"

Example

SCALAR \nNAME  VALUE   \nbinary_output                       false     \ncheckdata                           false     \ncont_bad                            false     \ncont_missing                        true     \nEND\n

The SCALAR table comprises name-value pairs for scalars. The scalars that are allowed depend on the specific model.

"},{"location":"manual/reference/Scalar/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Scalar/#name","title":"NAME","text":"

Name of the parameter. This is the identifier of the parameter.

"},{"location":"manual/reference/Scalar/#value","title":"VALUE","text":"

Value assigned to the parameter. These are interpreted by the model first as text (to allow substitution using ENVVARS) and then converted to the correct data type and validated. For boolean (true/false) one letter is sufficient.

"},{"location":"manual/reference/Scalar/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Scalar/#identifier","title":"Identifier:","text":"

NAME

"},{"location":"manual/reference/Scalar/#parent-table","title":"Parent Table:","text":"

Table is parent

"},{"location":"manual/reference/Scalar/#include-block","title":"Include Block:","text":"

PARAMETER

Generally you will work with the standard parameters distributed with DSM2. You always have to provide RUN_START_DATE, RUN_END_DATE as the defaults are deliberately designed to halt the model.

"},{"location":"manual/reference/Source_Flow/","title":"Source Flow","text":""},{"location":"manual/reference/Source_Flow/#overview","title":"Overview","text":"

Source flows represent inflows and outflows in the interior of the model domain at nodes. An entry here creates a source and assigns a time series of in/outflows to it.

"},{"location":"manual/reference/Source_Flow/#tables","title":"Tables","text":"

Example

# Description:\n# Historical\u00a0source flow at Tracy Pump\nSOURCE_FLOW\nNAME\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0NODE SIGN FILLIN FILE\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0PATH\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 \ncvp\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 181\u00a0\u00a0 -1  last\u00a0\u00a0 ${BNDRYINPUT}\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 /FILL+CHAN/CHDMC004/FLOW-EXPORT//1DAY/${HISTFLOWVERSION}/\u00a0\u00a0\u00a0 \nEND\n

The node SOURCE_FLOW table defines sources and sinks by giving them names and associating them to a node. The table also assigns a time series to the source.

"},{"location":"manual/reference/Source_Flow/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Source_Flow/#name","title":"NAME","text":"

Name assigned to the source. This is the identifier of the boundary and is referred to elsewhere in the input system. If you assign water quality you will use the same name in order to match concentration to flow.

"},{"location":"manual/reference/Source_Flow/#node","title":"NODE","text":"

Node number at which the source is applied.

"},{"location":"manual/reference/Source_Flow/#sign","title":"SIGN","text":"

Forces the time series to be a source or a sink. Positive values are normally associated with a source, but the data (especially sinks such as agricultural diversions) are sometimes measured in absolute flow. Use 1 to force the value to be a positive source or -1 to interpret values as a sink.

"},{"location":"manual/reference/Source_Flow/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model. See\u00a0fillin types

"},{"location":"manual/reference/Source_Flow/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word\u00a0constant\u00a0if you would like to assign a constant value to the input (the value will be entered in the next column).

"},{"location":"manual/reference/Source_Flow/#path","title":"PATH","text":"

The path within the text or DSS file of the time series data. If you used the\u00a0constant\u00a0keyword in the Input File column, enter the value (e.g.\u00a04.22) here.

"},{"location":"manual/reference/Source_Flow/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Source_Flow/#identifier","title":"Identifier:","text":"

NAME

  • Multiple sources and sinks can be assigned to a node. They are usually kept separate in order to assign different concentrations to them.
  • HYDRO is able to accept sources and sinks at boundary nodes, but this is not good modeling practice. Use them on the interior.
"},{"location":"manual/reference/Source_Flow_Reservoir/","title":"Source Flow Reservoir","text":""},{"location":"manual/reference/Source_Flow_Reservoir/#overview","title":"Overview","text":"

Reservoir source flows represent mass inflows and outflows in the interior\u00a0 of the model domain at reservoirs. An entry here creates a source and assigns a time series of in/outflows to it.

"},{"location":"manual/reference/Source_Flow_Reservoir/#tables","title":"Tables","text":"

Example

# Description:\n# Historical\u00a0source flow at Bank Pump\nSOURCE_FLOW_RESERVOIR\nNAME\u00a0RES_NAME\u00a0     SIGN\u00a0FILLIN\u00a0FILE\u00a0                              PATH\u00a0 \nswp\u00a0\u00a0clifton_court\u00a0-1\u00a0\u00a0\u00a0last\u00a0\u00a0 ../../timeseries/hist_19902012.dss\u00a0/FILL+CHAN/CHSWP003/FLOW-EXPORT//1DAY/DWR-DMS-201203/\u00a0\u00a0\u00a0 \nEND\n

The SOURCE_FLOW_RESERVOIR table defines sources and sinks by giving them names and associating them to a reservoir. The table also assigns a time series to the source.

"},{"location":"manual/reference/Source_Flow_Reservoir/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Source_Flow_Reservoir/#name","title":"NAME","text":"

Name assigned to the source. This is the identifier of the boundary and is referred to elsewhere in the input system. If you assign water quality you will use the same name in order to match concentration to flow.

"},{"location":"manual/reference/Source_Flow_Reservoir/#res_name","title":"RES_NAME","text":"

Name of reservoir at which the source is applied.

"},{"location":"manual/reference/Source_Flow_Reservoir/#sign","title":"SIGN","text":"

Forces the time series to be a source or a sink. Positive values are normally associated with a source, but the data (especially sinks such as agricultural diversions) are sometimes measured in absolute flow. Use 1 to force the value to be a positive source or -1 to interpret values as a sink.

"},{"location":"manual/reference/Source_Flow_Reservoir/#fillin","title":"FILLIN","text":"

Method for filling in data if the time step of the assigned series is coarser than the time step of the model. See\u00a0fillin types

"},{"location":"manual/reference/Source_Flow_Reservoir/#file","title":"FILE","text":"

DSS or text file in which data are stored. Use consistent case when referring to the same file. You may also enter the word\u00a0constant\u00a0if you would like to assign a constant value to the input (the value will be entered in the next column).

"},{"location":"manual/reference/Source_Flow_Reservoir/#path","title":"PATH","text":"

The path within the text or DSS file of the time series data. If you used the\u00a0constant\u00a0keyword in the Input File column, enter the value (e.g.\u00a04.22) here.

"},{"location":"manual/reference/Source_Flow_Reservoir/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Source_Flow_Reservoir/#identifier","title":"Identifier:","text":"

NAME

Multiple sources and sinks can be assigned to a reservoir. They are usually kept separate in order to assign different concentrations to them.

"},{"location":"manual/reference/Source_Tracking/","title":"Source Tracking","text":""},{"location":"manual/reference/Source_Tracking/#overview","title":"Overview:","text":"

Source Tracking is implemented using QUAL output

"},{"location":"manual/reference/Source_Tracking/#tables","title":"Tables:","text":"
  • OUTPUT_CHANNEL_SOURCE_TRACK
  • OUTPUT_RESERVOIR_SOURCE_TRACK
"},{"location":"manual/reference/Tidefile/","title":"Tidefile","text":""},{"location":"manual/reference/Tidefile/#overview","title":"Overview","text":"

The tidefile is the HDF5-formatted binary file used to pass flow and geometry data from HYDRO to QUAL and PTM. The tidefile is specified as output by HYDRO in the IO_FILE table. It is specified as input to QUAL and PTM in the TIDEFILE section. Input tidefiles can be specified only in text.

Tidefiles can be stacked if desired, but this is an old feature that is now deprecated. Stacking means that the flow simulation can be divided temporally among several HYDRO runs and then the resulting tidefiles used sequentially in QUAL or PTM.

"},{"location":"manual/reference/Tidefile/#tables","title":"Tables","text":"

Example

TIDEFILE     \nSTART_DATE END_DATE FILENAME   \nruntime    length   ${HYDROTIDEFILE} # begin run to 20JUL  \nEND \n

The following example uses one tidefile with an environmental variable for the file name. This is the most common treatment.

Example

TIDEFILE     \n\n\n\n\nSTART_DATE  END_DATE   FILENAME   \nruntime     20JUL1996  hist1.h5      # beginning of run to 20JUL  \n20JUL1996   24JUL1996  hist2.h5   \nlast        length     hist3.h5      # end of previous to end of run  \n01SEP1996   length     ficticious.h5 # no error: will never be opened   \nEND \n

This example uses several tidefiles tiled together to cover a longer period. Please let us know if you need this functionality, as it is a holdover from the old \"repeating tide\" days and will probably be deprecated.

"},{"location":"manual/reference/Tidefile/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Tidefile/#start_date","title":"START_DATE","text":"

When to start using the tidefile. Tidefiles must be listed in temporal order. The START_DATE of the first tidefile must fall on or before the start of the run. The START_DATE of subsequent tidefiles must exactly coincide with the END_DATES of preceding tidefiles. There is no associated \"TIME\" part -- tidefiles must be matched on calendar days. If a START_DATE is not given or is listed as \"none\", the timestamp in the tidefile will be used for the start. There are some special keywords that can be used with START_DATE:

  • runtime: start time in tidefile
  • last: use this tidefile when the previous tidefile ends
  • none: use default.
"},{"location":"manual/reference/Tidefile/#end_date","title":"END_DATE","text":"

When to stop using the tidefile. If not given, the tidefile is used until it ends. The END_DATE of the last tidefile must overlap the runtime of the simulation. Note that this can be a little tricky because the ending time is the time 0000 of the END_DATE, so you may need another day. You can avoid this sort of problem by specifying your run dates with standard times (0000\u00a0 instead of military 2400). There are some special keywords that can be used with END_DATE:

  • length: use all of the tidefile, up until its end
  • none: use default.
"},{"location":"manual/reference/Tidefile/#filename","title":"FILENAME","text":"

Name of the file. Use upper/lower case consistently because filenames are case sensitive.

"},{"location":"manual/reference/Tidefile/#table-info","title":"Table Info","text":""},{"location":"manual/reference/Tidefile/#identifier","title":"Identifier:","text":"

FILENAME

ENVVARs are often used for names of files, DSS paths, parameters that are varied over a study -- the substitution will occur at runtime.

"},{"location":"manual/reference/Transfer/","title":"Transfer","text":""},{"location":"manual/reference/Transfer/#overview","title":"Overview","text":"

Transfers are direct water connections from a reservoir or node to another reservoir or node. Transfers are instantaneous movements of water (and its constituents and particles) without any detailed description of physics or storage. The Transfer View specifies the connectivity of the transfer. A time series must also be listed in the Transfer Time Series View to specify the flow -- the default is zero.

"},{"location":"manual/reference/Transfer/#tables","title":"Tables","text":"

Example

# Description:\n# Sample transfer from a reservoir to a node\n\nTRANSFER \nNAME       FROM_OBJ  FROM_IDENTIFIER TO_OBJ TO_IDENTIFIER \ntransfer_1 reservoir res_1           node   6  \nEND\n

The Transfer table defines the name and connectivity of the transfer. The flow is a time series input specified in TRANSFER_TIME_SERIES

"},{"location":"manual/reference/Transfer/#field-descriptions","title":"Field Descriptions","text":""},{"location":"manual/reference/Transfer/#name","title":"NAME","text":"

Name of the transfer. This is the identifier of the transfer used in other GUI views.

"},{"location":"manual/reference/Transfer/#from_obj","title":"FROM_OBJ","text":"

Type (node or reservoir) of the source object.

"},{"location":"manual/reference/Transfer/#from_identifier","title":"FROM_IDENTIFIER","text":"

Identifier (node number or reservoir name) of the source object.

"},{"location":"manual/reference/Transfer/#to_obj","title":"TO_OBJ","text":"

Type (node or reservoir) of the destination object.

"},{"location":"manual/reference/Transfer/#to_identifier","title":"TO_IDENTIFIER","text":"

Identifier (node number or reservoir name) of the destination object.

In previous versions of DSM2, Transfers were called \"obj2obj\".

To complete the specification of a Transfer, a time series or constant flow must be attached to it in the Transfer Time Series table.

"},{"location":"reference/","title":"Reference","text":""},{"location":"reference/#background-material-and-references","title":"Background Material and References","text":"

Background Material and References

"},{"location":"reference/Background_Material_and_References/","title":"Background Material and References","text":"

DSM2 Hydro is based on the FourPt computer program written by Lew DeLong in USGS. The original documentation for FourPt is available here. DSM2 adds improvements and enhancements to the FourPt model, including an improved input and output system.

DSM2 Qual is roughly based on QUAL-2E and the Branched Lagrangian Transport Model (BLTM) written by Harvey Jobson of USGS.\u00a0

"},{"location":"reference/Background_Material_and_References/#download-dsm2-versions","title":"Download DSM2 Versions","text":"

DSM2 has had many versions over the past 30 years. Some of the recent ones are available here

Downloads of DSM2 Versions

"},{"location":"reference/Background_Material_and_References/#references","title":"References:","text":"

QUAL2E Documentation - Basis for QUAL Nonconservative Constituent Kinetics.pdf D1641rev.pdf EC_chloride_bromide_05_29_01.pdf Delta D1641 Water Quality Standards Full Reference.pdf BLTMenhancements-USGSWRI97_4050.pdf Four Point memo from USGS-basis for DSM2 HYDRO.pdf

"},{"location":"reference/Background_Slide_Material/","title":"Background Slide Material","text":"

Download All

"},{"location":"reference/Background_Slide_Material/#attachments","title":"Attachments:","text":"

IIA.05.Banks pumping.kts.ppt DSM2 reservoirs and gates.ppt DSM2 Delta Applications Intro.ppt DSM2 boundary conditions.ppt DSM2 analysis tools.ppt DeltaTutorial-Planning_PermanentBarriers.doc Tutorial maps.ppt Reservoir and Gate equations.ppt

"},{"location":"reference/CALSIM_-_DSM2_Integration/","title":"CALSIM - DSM2 Integration","text":""},{"location":"reference/CALSIM_-_DSM2_Integration/#background","title":"Background","text":"

CALSIM is a water operations simulation model. It meets demands using reservoir release operations and other operational criteria. A crucial operational criterion is meeting the salinity and X2 standards in the Delta.

CALSIM relies on DSM2 simulation of water quality standards. However DSM2 is computationally expensive to run in repeated scenarios needed by CALSIM. CALSIM relies on a linear programming approach and needs flow salinity relationships to estimate the flow needed to meet a particular water quality standard. Furthermore CALSIM is a monthly model and needs to make assumptions pertaining to that limitation.\u00a0

"},{"location":"reference/CALSIM_-_DSM2_Integration/#artificial-neural-networks-anns","title":"Artificial Neural Networks (ANNs)","text":"

To make it computationally feasible, the flow salinity relationships are derived from DSM2 simulations with perturbations of inputs that are of concern to CALSIM. These flow relationship information is used as training data for Artificial Neural Networks (ANNs); more specifically Feed-forward Neural Networks (FNNs).\u00a0 These ANNs then are surrogate models for DSM2 and are supposed to represent the impact of operations on X2 and salinity standards.

"},{"location":"reference/CALSIM_-_DSM2_Integration/#full-circle-analysis","title":"Full circle analysis","text":"

To verify the results derived from having a surrogate DSM2 (ANN) model in CALSIM, the CALSIM flows and gate conditions are converted into daily inputs (with assumptions for monthly to daily) for DSM2 and the output salinity is checked against the X2 or salinity standards in CALSIM. This is called a \"full circle analysis\".\u00a0 Typically these have been done for a select period of 16 years but can be extended to the entire period of 82 years of simulation if desired.

"},{"location":"reference/CALSIM_-_DSM2_Integration/#dsm2-boundary-conditions","title":"DSM2 boundary conditions","text":"

DSM2 needs flow and stage boundary conditions, i.e. the inputs at the edges of the domain that would drive the simulation.

  1. Flow boundaries: CALSIM operates the reservoirs upstream of the Delta and as a result the flow conditions are established by CALSIM simulations, though on a monthly time step resolution.
  2. Gate positions:\u00a0 CALSIM operates these to satisfy regulations and other constraints.\u00a0
  3. Stage boundary: The only one is the ocean boundary at Martinez that is derived from astronomical stage at San Francisco with regression using historical data to transfer to Martinez (Planning tide generator)
  4. Martinez EC boundary: This is derived from a flow salinity relationship based on G model and stage boundaries (Planning Martinez EC generator)
  5. Vernalis EC boundary: Derived from flow regression equations.
  6. Consumptive Use: These are represented in DSM2 at 258 nodes, CALSIM does not directly simulate these, however they are provided as input to CALSIM based on consumptive use models
  7. Agricultural Drain EC: These are the most uncertain of the boundary conditions and are represented in DSM2 as annually repeating values.
  8. Waste water treatment plants ??
"},{"location":"reference/CALSIM_-_DSM2_Integration/#implementation","title":"Implementation","text":"

These boundary conditions are explicitly mapped in this document between the CALSIM and DSM2 schematics.\u00a0Schematics and Boundaries

"},{"location":"reference/CALSIM_-_DSM2_Integration/#resolving-monthly-daily-conversions","title":"Resolving Monthly - Daily conversions","text":"

CALSIM is a monthly time step model and DSM2 runs on 15 min or lower time steps. The input data for CALSIM is monthly averaged i.e. a single value for the entire month. DSM2 typically takes daily input values and is also capable of hourly or sub hourly resolved values.\u00a0 This mismatch has to be resolved when doing this integration.

For daily to monthly conversions, it is simply a monthly averaging technique. For certain quantities, such as gate positions, a count of values may be computed ?

For monthly to daily conversions, there is a large information gap. This means a lot of the information that is lost has to be either estimated or left as the same value repeated over the days of the month. This is usually the case for the flows, except that for stability reasons (hydrodynamic models) the transition days between months employ a volume-conserving spline to smooth the transition.\u00a0

  • Discuss daily variation issue here
"},{"location":"reference/CALSIM_-_DSM2_Integration/#version-control","title":"Version Control","text":"

CALSIM and DSM2 have different versions, evolving at different rates for different needs. As a result is important to manage these versions and the mappings between them.\u00a0Draft_CALSIMII_DCU_Modification_081809

  • What if CALSIM schematic changes? \u00a0Implication for the integration above?
"},{"location":"reference/CALSIM_-_DSM2_Integration/#notes","title":"Notes","text":"

Martinez stage has been adjusted a little bit on 24DEC1967 to overcome a dry-up breakdown at channel 201. The correction resides in a timeseries\u00a0${DSM2}\timeseries\Planning_Tide_82years.dss. Planning study users should add it to replace the regular timeseries.

"},{"location":"reference/CALSIM_-_DSM2_Integration/#attachments","title":"Attachments:","text":"

bat_prep.png (image/png)

"},{"location":"reference/Checklists/","title":"Checklists","text":"

Checklists are a great way to codify quality checks to avoid obvious and typical errors for a task.\u00a0

Historical simulations

Bob Suits put together a checklist used for historical simulation updates.\u00a0

Historical Simulation Checklist

"},{"location":"reference/Checklists/#planning-simulations","title":"Planning simulations","text":"
  • Need a similar checklist for planning runs\u00a0Yu (Joey) Zhou ?\u00a0
"},{"location":"reference/Checklists/#attachments","title":"Attachments:","text":"

verifying_historical_simulation_101620.pdf (application/pdf)

"},{"location":"reference/DSM2_Cloud_Setup/","title":"DSM2 Cloud Setup","text":"

DSM2 has been compiled on Linux and tested against the Windows results. The output from hydro and qual is similar though not exact (floating point level differences)

AWS (Amazon Web Services) Cloud services have been used to run DSM2 using AWS Linux AMI (Amazon Machine Image). This requires a user to start linux VM and then download and run DSM2 on that VM.

Docker install on AWS Linux

A serverless approach to this would be that the user submits a batch job consisting of a specification of what container (Docker) to be used and a zip file with the inputs. The batch job is then run on a suitable machine and the resulting output file is zipped and uploaded to S3 (AWS Simple Storage System)

The serverless approach allows for submission of multiple concurrent jobs that provide the ability to do many parallel runs at the same time. The charges are on a per-second basis, making efficient use of computing resources.

The use of the cloud to run a batch DSM2 PTM is here:\u00a0How to Run a DCP PTM Batch Job on AWS

"},{"location":"reference/Historical_Simulation_Checklist/","title":"Historical Simulation Checklist","text":""},{"location":"reference/Historical_Simulation_Checklist/#verifying-an-extension-of-the-historical-simulation","title":"Verifying an Extension of the Historical Simulation","text":"

Bob Suits 10/16/2020

"},{"location":"reference/Historical_Simulation_Checklist/#verify-input-hydrology","title":"Verify Input Hydrology","text":""},{"location":"reference/Historical_Simulation_Checklist/#check-boundary-conditions","title":"Check Boundary Conditions","text":"
  • Sacramento River inflow
  • San Joaquin River Inflow
  • Sacramento River + Yolo Bypass Inflow
  • Banks pumping
  • Jones pumping
"},{"location":"reference/Historical_Simulation_Checklist/#get-observed-data","title":"Get Observed Data","text":"

Preferably get the Sacramento and San Joaquin River inflows and Banks and Jones pumping from DAYFLOW. If DAYFLOW isn\u2019t complete, get remainder daily average flow from CDEC. It needs to be independent of the DSM2 set-up.

Get reported flow at SRV (Rio Vista) and generate daily average flow.

"},{"location":"reference/Historical_Simulation_Checklist/#generate-daily-average-flow-from-dsm2-simulation-at","title":"Generate daily average flow from DSM2 simulation at:","text":"

VCU, ORI, OH4, OBD, GLC, RSAC101, RSAN115, RSAC155

"},{"location":"reference/Historical_Simulation_Checklist/#compare-daily-average-observed-flows-to-dsm2-simulated-flows-at-boundaries","title":"Compare daily average observed flows to DSM2-simulated flows at boundaries","text":"Flow CDEC Stations Operating Agency Simulated Sac River Inflow FPT USGS RSAC155 SJR Inflow RSAN112 SRV RSAC101 Banks Pumping VCU + ORI \u2013 OH4 Jones Pumping OBD + GLC \u2013 ORI Banks + Jones VCU + OBD + GLC \u2013 OH4"},{"location":"reference/Historical_Simulation_Checklist/#verify-timing-of-installation-and-removal-of-temporary-barriers-and-operation-of-montezuma-control-structure-and-delta-cross-channel-gates","title":"Verify timing of installation and removal of temporary barriers and operation of Montezuma Control Structure and Delta Cross Channel Gates","text":"
  1. Create a dss file with the observed and simulated 15-minute data. Compare observed and simulated stages just upstream and downstream of each barrier site. This would already have been done with observed data in establishing the timings by looking at observed stages. Now repeat the analysis in order to confirm that you got the operation timing correct.

    Barrier Stations to use to check \u00a0Middle River \u200bMUP and (MAB or MTB) Grant Line Canal GCT and GLE Old River OBD and (OAD or ODM) Old River at Head OH1 and SJL
  2. Compare internal daily average flows affected by gate operations

    Observed Simulated DLC DLC Delta Cross Channel NSL SLMZU025 Montezuma Slough at National Steel
  3. Check key internal flows for overall circulation of Delta waters

    Observed Simulated GSS GSS TRN TRN OBI OBI (ROLD024) MDM \u00a0Subtract RMID015-145 from RMID015-144 OH4 OH4 (ROLD034) VCU VCU OH1 OH1 OLD OLD GLE GLE
"},{"location":"reference/Historical_Simulation_Checklist/#verifying-ec","title":"Verifying EC","text":"

Get OCO\u2019s monthly updated EC estimates at: Banks, Jones, OH4, OBI to compare to Delta Modeling Section\u2019s historical simulation and reported EC.

Observed\u00a0 Simulated BANKS BANKS JONES JONES OBI ROLD024 OH4 ROLD034

Compare observed EC to simulated EC at other key locations

Observed\u00a0 Simulated ANH\u00a0 ANH (RSAN007) EMM\u00a0 EMM (RSAC092) JER\u00a0 RSAN018 MDM\u00a0 RMID015 VCU\u00a0 CHVCT000 OH1\u00a0 ROLD074 OLD\u00a0 ROLD059"},{"location":"reference/Historical_Simulation_Checklist/#attachments","title":"Attachments:","text":"

worddavdba6ec7d80d8e6d0f248fa3e9c1a9f2c.png (image/png) worddav1c660f167bf01e13f8c74d8d923445b8.png (image/png) worddavc1eb73f1cc8b69c78afeeda15ca65f9f.png (image/png) worddavffb24ad6008690b469796d092cb3e822.png (image/png) worddav7e51b7bfbf042cd6018456316774c8ac.png (image/png) worddav46b8e2022c959a0ec4979c9f7a8a9b76.png (image/png) worddav096303e2931d512411acda589b39c3cb.png (image/png) worddavac6c9b93520fd08679d0ccfdc60b7b66.png (image/png) worddav417450fd7fccb9d246170ebb7f24d040.png (image/png) worddava898e0b2ae4581b97e766bf78287b5c1.png (image/png) worddav68b2cbedb7a858f3c9b7f1a1e3ab74ac.png (image/png) worddav5b3d61166388b0d6b2f355755006cda2.png (image/png) worddav8c7ed9ee05ac17e5e96e011f3f2618cd.png (image/png) worddav1767755d061f0eab1f3ca3f72ac8cef2.png (image/png) worddavf80c76565c2ad18b4d77a1e3e7d95206.png (image/png) worddavcbc13ea92d122ff1199746fedcb5d095.png (image/png) worddavbec1a14c56fb496c1411aa3f37418f8f.png (image/png) worddav7b88059002b010dabef7c4b1535bb124.png (image/png) worddav5bb8f683823e3bf73c38767313d66e71.png (image/png) worddavc305fa1a938290814fd2248549ba4430.png (image/png) worddavdfbad699bc5178a523225eb4db224138.png (image/png) worddav4ea5b554fecf90d7309db37a565d3a27.png (image/png) worddav97cef626ad442220331e25e5e8e8107f.png (image/png) worddave19a27a3183a9cd911129f634e5785e1.png (image/png) worddavcafee77d96b6996a2ba1b263d685acf2.png (image/png) worddav586bb64e084e848f881c6b85898711d9.png (image/png) worddav06a87c364207da504d16cfe87f616b71.png (image/png) worddavfed7fe74c73acf86c9270728c405a08c.png (image/png) worddav1b9bd5a7d0e6c5fe479a6a2f41178b3d.png (image/png) worddav252bc8a83f17f3aaf5122fc24dc34a7b.png (image/png) worddavaefc8f4547e26bc30a4e1e2e6d9649e4.png (image/png) worddav43dc9470b95f7adadead62075f065c27.png (image/png) worddav44b58de538e094cc6f56fa252e0b3775.png (image/png) worddavf81ccfffae04537644dbc328afa65327.png (image/png) worddavfb16076aa13e127fd8eb1d3b049dec6e.png (image/png) 
worddav9a6b20dc0a73de6bf9dab0456647125f.png (image/png) worddav230f2ffdb48f91fc3c31cb101e946de0.png (image/png) worddave2ddaa02d082a0f4038f1aa1cae93a47.png (image/png) worddav41ab2bbaa193e682b05098057d919df2.png (image/png) worddavcb34498bae50e99b02ddf21594a946c0.png (image/png) worddav248f48ce932b6488517e2109896fdb56.png (image/png) worddave25679296b7bc483b7fb1156d722e8b6.png (image/png) worddav44d59b8b5a711866ad8dde94f0d42d27.png (image/png) worddav660ebda068b8ef166732e0aef200cc6e.png (image/png)

"},{"location":"reference/Organizing_a_Study/","title":"Organizing a Study","text":""},{"location":"reference/Organizing_a_Study/#overview","title":"Overview","text":"

The DSM2 installation directory is as follows:

/dsm2\n  /bin            # where the executable files are\n  /common_input   # a repository of common input files\n  /studies\n     [empty]      # DO YOUR WORK HERE (if inside the distribution)\n  /study_templates    # ...and NOT here\n     /historical\n     /ocap_sdip\n        config_ocap_sdip.inp\n        hydro.inp\n        qual_ec.inp\n        /timeseries\n  /timeseries     # Archivable time series (historical\n                  # and series used as basis for preprocessing)\n  /tutorials\n
"},{"location":"reference/Organizing_a_Study/#explaining-the-directory-structure","title":"Explaining the directory structure","text":""},{"location":"reference/Organizing_a_Study/#bin","title":"bin","text":"

\"Bin\" stands for \"binary\" and refers to the executables. When you installed dsm2, your path variable got set to the new distribution.

"},{"location":"reference/Organizing_a_Study/#common_input","title":"common_input","text":"

The common input directory is a repository of files that are used in the standard templates. These files start with the name of the (parent) object, then the \"layer\" name, then the version date. The templates refer to this directory often. You do not have to maintain these links, but please do not edit the files... the\u00a0Layering\u00a0system should help with this.

"},{"location":"reference/Organizing_a_Study/#study_templates","title":"study_templates","text":"

This directory houses samples for historical and planning runs. They represent our latest setup as of the distribution. As the templates are updated they may point to newer files in\u00a0common_input.

"},{"location":"reference/Organizing_a_Study/#studies","title":"studies","text":"

A study is an associated group of simulations, which might involve any combination of DSM2 modules. Often the study compares several different alternatives. Many DSM2 modelers prefer to house different alternatives in different folders, but there are good reasons to house them in one study folder and just use different configuration files. To get started you will typically copy one of the study_templates sub-directories to the /studies folder. Don't change the ones in study_templates!

"},{"location":"reference/Organizing_a_Study/#timeseries","title":"timeseries","text":"

The\u00a0timeseries\u00a0directory contains the timeseries you will need in the regular course of working with DSM2. Since the data that are most reusable are historical and DICU, that is most of what you will find here. We don't recommend putting study-specific files (e.g. CALSIM output) in this directory.

"},{"location":"reference/Organizing_a_Study/#tutorials","title":"tutorials","text":"

The\u00a0tutorials\u00a0directory is a workspace for using the tutorials. It is a lot like /studies in the sense that you will copy templates here.

"},{"location":"reference/Planning_Simulation_Checklist/","title":"Planning Simulation Checklist","text":""},{"location":"reference/Planning_Simulation_Checklist/#model-preparation","title":"Model Preparation","text":"

Get Input Data from Calsim

  • Rename Calsim output as dv.dss and put in timeseries\\CALSIM\\
  • Preprocess Calsim output to DSM2 inputs

Check Boundary Conditions

  • Sacramento River inflow
  • San Joaquin River Inflow
  • Sacramento River + Yolo Bypass Inflow
  • Banks pumping
  • Jones pumping

Martinez Stage

  • Planning stage
  • Sea Level Rise stage

Martinez EC

  • Martinez EC generator

Consumptive Usage

  • DCD planning (which is also input for Calsim3)

Gate Operation

  • DCC
  • Montezuma Gate
  • Clifton Court Forebay Gate
"},{"location":"reference/Planning_Simulation_Checklist/#model-run","title":"Model Run","text":"

Binary Versions

  • suggest using relative path and .bat

Running Time Window

  • 16-year: 1975/10 - 1991/9
  • 94-year:\u00a01921/10 - 2015/9
  • start a few months earlier for model warm-up
"},{"location":"reference/Planning_Simulation_Checklist/#postprocess-visualization-usage","title":"Postprocess, Visualization, Usage","text":"

Notebook

Water Quality Standard

"},{"location":"reference/Post_processing_and_Visualization/","title":"Post processing and Visualization","text":"

DSM2 writes out model information in HEC-DSS format and HDF5 format. Output requested from the model is written to HEC-DSS while model input and state is recorded in HDF5 files.

Vista is the standard tool for accessing both these kinds of information for DSS. Vscript is the associated scripting tool which leverages the Python language with the HEC-DSS and HDF5 java libraries.

"},{"location":"reference/Presentations/","title":"Presentations","text":""},{"location":"reference/Presentations/#introduction","title":"Introduction","text":""},{"location":"reference/Presentations/#installation","title":"Installation","text":""},{"location":"reference/Presentations/#attachments","title":"Attachments:","text":"

Cool Tips.ppt PTM_2_LowPumpingTempBar.avi p0_template.pptx or2_sdip_oprules.ppt or2_sdip_op.ppt or1_op_rule.ppt DSM2V8 agenda 900 class.docx DSM2V8 agenda 830 class.docx DSM2_class_list_900.docx DSM2_class_list_830.docx DSM2 Version 8 Class Syllabus.docx d2_ptm.pptx d1b_nonconservative.pptx d1_historical.pptx d0_delta_applications.ppt b6_op_rule.ppt b5_source_tracking.ppt b4_simulation_data.pptx b3_input_system_layering.pptx b2_reservoir_gate_transfer.ppt b1_channel.pptx a4_dsm2_version8.ppt a3_dsm2_user_group.pptx a2_installation.pptx a1_introduction.pptx d4_move_archive_batch.ppt d3_planning.ppt

"},{"location":"reference/RKI_Referenced_Output/","title":"RKI Referenced Output","text":"

River Kilometer Index is a way to index locations along a river by measuring the distance in kilometers from the downstream end of the river, e.g. RSAC054 is 54 km from the Golden Gate Bridge, the most downstream discernible reach of the Sacramento River.

In light of current GIS information this practice should be superseded by exact latitude/longitude coordinates; however, it remains in use for legacy reasons. It is also useful for physical processes that are related to the distance along the river rather than to coordinates in a general space.

The table below comes from DSM2 v6 (last referenced by J. Anderson)\u00a0

NAME CHAN DIST COMMON_NAME BYOLO040 399 0 # Yolo Bypass\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CFTRN000 172 727 # Turner Cut\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CHCCC006 247 0 # Contra Costa Pumping Plant / Rock Slough\u00a0\u00a0\u00a0\u00a0\u00a0 CHDMC004 216 2000 # DMC\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CHDMC006 216 0 # DMC @ Tracy Pumping Plant\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CHGRL005 211 1585 # Grant Line Canal (West Position)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CHGRL009 207 36 # Grant Line Canal (East Position)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CHGRL012 204 1672 # Grant Line Canal @ Head\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CHSAC030 392 23614 # Sacto. Ship Channel\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CHSAC031 392 20661 # Sacto. Ship Channel\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CHSWP003 82 length # Clifton Court Forebay (gates)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CHVCT000 229 1328 # Victoria Canal\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CHWST000 232 3084 # Clifton Court Forebay Entrance\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CRGRV002 455 0 # Green Valley Creek\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 CRSUS004 457 0 # Suisun Creek @ Cordelia Rd.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 LSHL001 281 113 # Sac. 
@ Lake Sherman\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 LSHL003 299 5145 # SJR @ Mayberry Cut\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RCAL009 21 0 # Calaveras River at Stockton\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RCSM075 549 2501 # Cosumnes River\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RFAL008 276 5648 # FALSE River @ Webb Tract\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMID005 156 140 # Middle River\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMID007 248 665 # Middle River\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMID015_144 144 838 # Middle River\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMID015_145 145 2114 # Middle River (same as #144)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMID023 135 719 # Middle River @ Borden Hwy\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMID027 133 3641 # Middle River @ Tracy Blvd\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMID040 126 3951 # Middle River @ Mowery Bridge\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMID041 125 1700 # Middle River @ Old River\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMKL005 374 5030 # North Fork Moke. River (Georgiana Sl.)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMKL019 357 694 # North Fork Moke. River\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMKL027 334 350 # Moke. River @ Thornton\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMKL032 550 2617 # Moke. River near Thornton\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RMKL070 550 0 # Moke. 
River @ Woodbridge\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 ROLD014 117 0 # Old River @ Holland Cut\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 ROLD024 106 2718 # Old River @ Bacon Island (near CCC)\u00a0\u00a0\u00a0\u00a0\u00a0 ROLD034 90 3021 # Old River near Byron\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 ROLD040 82 2609 # Old River @ Clifton Court Ferry\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 ROLD046 80 1431 # Old River\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 ROLD047 79 2766 # Old River\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 ROLD059 71 3116 # Old River @ Tracy Road\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 ROLD074 54 735 # Old River @ Head\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC054 441 length # Martinez (MRZ)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC056 441 3119 # Martinez at Benicia Bridge\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC064 452 190 # Port Chicago\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC075 437 11108 # Mallard Island (MAL)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC077 437 1870 # Pittsburg (PTB)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC081 436 5733 # Collinsville\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC084 435 9662 # Sac River near Sherman Lake\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC092 434 435 # Emmaton\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC101 430 9684 # Rio Vista (RIV)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC123 423 1358 # Sac near Georgiana Slough\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC128 421 8585 # Sac above DCC\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC139 418 4814 # Sac @ Green's Landing\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC140 418 0 # Sac @ Snodgrass Sl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC142 417 5496 # Sac @ 
Hood\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAC155 414 11921 # Sac @ Freeport\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN002 53 4276 # Mouth SJR\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN003 285 1700 # SJR @ Sherman Lake\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN007 52 366 # SJR @ Antioch\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN008 52 0 # Lone Tree Way @ Hwy. 4 near Antioch\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN014 49 9570 # Blind Point\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN018 83 4213 # Jersey Point\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN024 47 8246 # SJR @ Bradford Isl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN032 349 9672 # San Andreas Landing, NOTE: RSAN032 water comes from Moke. R, hence the model location is on Moke. MM, 2000.09.06\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN037 42 286 # SJR @\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN040 38 3526 # SJR\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN043 319 8571 # SJR @ Venice Isl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN046 31 5628 # SJR between Turner & Columbia Cut\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN052 24 2643 # SJR @ Rindge Pump\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN058 20 2520 # SJR @ Stockton Ship Channel\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN063 14 3281 # SJR @ Stockton\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN072 10 9400 # SJR @ Brandt Bridge\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN087 6 3930 # SJR @ Mossdale\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSAN112 17 4744 # SRJ @ Vernalis\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSMKL008 344 7088 # South Fork Moke @ Staten Island\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 RSMKL024 337 971 # South Fork Moke @ New Hope Bridge\u00a0\u00a0\u00a0\u00a0\u00a0 SLBAR002 406 0 # Barker Slough 
/ North Bay Aqueduct\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLCBN001 477 4000 # Chadbourne Sl. (Hollywood Club)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLCBN002 477 0 # Chadbourne Sl. (Sunrise Club)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLCCH016 402 0 # Cache Slough\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLCRD000 471 7216 # Cordelia Sl. (Miramonte)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLCRD003 474 3754 # Cordelia Sl. (Cygnus)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLCRD006 469 4776 # Cordelia Sl. (Ibis)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLCRD009 468 11200 # Cordelia Sl. (Garibaldi)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLDUT007 274 7351 # Dutch Sl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLDUT009 273 4026 # Dutch Sl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLFHN002 479 2640 # Frank Horan Sl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLGYR003 501 0 # Goodyear Sl. (Morrow Island)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLGYR008 473 1955 # Goodyear Sl. (Ghost Fleet)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLHIL002 486 4615 # Hill Sl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLIND005 239 0 # East CC Pumping Plant (Discovery Bay)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLMAY002 283 1611 # Mayberry Sl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLMID001 289 5441 # Middle Sl. @ Winters Island\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLML001 443 4599 # Mallard Sl. (CCWD)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLMZU003 523 0 # Montezuma Sl. @ Hunter Cut\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLMZU011 517 7662 # Montezuma Sl. @ Beldon's Landing\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLMZU025 513 13883 # Montezuma Sl. @ National Steel\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLMZU029 513 1346 # Montezuma Sl. 
@ Roaring River\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLMZU032 511 1677 # Montezuma Sl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLNY002 288 2005 # New York Sl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLPPR000 268 4735 # Piper Sl. @ Bethel Tract\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLPPR003 269 8333 # Piper Sl. @ Bethel Isl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLRAR000 529 1250 # Roaring River\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLRAR009 532 7375 # Roaring River @ Sprig\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLRCK005 247 221 # Rock Slough (CCC)\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLSBT011 385 2273 # Steamboat Sl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLSUS012 461 9982 # Suisun Sl. @ Volanti\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLTMP000 194 1946 # Tom Paine Sl. Intake\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLTMP017 185 0 # Tom Pain Sl.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 SLTRM004 310 540 # Three Mile Sl. @ SJR"},{"location":"reference/Scripts_and_input_files/","title":"Scripts and input files","text":""},{"location":"reference/Scripts_and_input_files/#folder-structure","title":"Folder Structure","text":"

Most preprocess scripts are originally located at ${DSM2}/scripts/, while it also needs DSM2 config file and input timeseries (from CalSIM) to run.

Fig. Sample batch preprocess from CalSIM II to DSM2

Some\u00a0key functions\u00a0are explained below:

  • config file usually defines the time window, CALSIMNAME, DSM2MODIFIER, which need to be consistent with the scripts and timeseries files.
  • planning_boundary_flow contains method 'smooth_flow' to 'tension spline' boundaries Sac and Vernalis from monthly to daily-interval data.
  • DICU are directly transferred as monthly-interval data.
  • prep_ec generates Martinez EC from its astro-planning stage and NDO (modified from G-model and has a newly calibrated version). Refer to Martinez EC Generator\u00a0for its introduction and updates.
"},{"location":"reference/Scripts_and_input_files/#to-run","title":"To run","text":"

type prepro*.bat config*.inp at the scenario path in command window

*usually we start preprocess earlier (like 1 month) than hydro + qual

"},{"location":"reference/Scripts_and_input_files/#cwf-preprocess-version-in-dsm2-v806","title":"CWF preprocess version in DSM2 v806","text":"

CH2M helped create an updated version for the project 'California Water Fix' (CWF), whose existing condition (EST)\u00a0and No Action Alternative (NAA) are widely used as templates for DSM2 planning studies.

Figure. Sample directory of CWF scenario for DSM2 planning modeling

Compared to the original scripts package, CWF version has

  • a sub-folder 'scripts' under scenario. It contains updated scripts to replace those under ${DSM2}/scripts/.
  • a sub-folder 'input' under scenario. It contains updated hydro and qual grids.
  • a sub-folder 'timeseries' under scenario. It contains updated DSM2 input files (*daily.dss for boundaries, *.dss for Martinez stage, DICU, oprule, QUAL, *qa.dss for QAQC)
  • the above input files are generated from\u00a0its 'CALSIM' subfolder, with *DV.dss (CalSIM outputs) and *SV.dss (CalSIM inputs) required by\u00a0the current CalSIM II preprocess (CalSIM 3 plans to keep only *DV.dss)

Figure. CWF version's batch preprocess from CalSIM II to DSM2

More details in the scripts:

  • planning_boundary_flow contains method 'smooth_flow' to 'tension spline' boundaries Sac and Vernalis from monthly to daily-interval data.
  • CWF dailymapping processes all boundaries and source flows to daily-interval data.
  • prep_ec has updated changes with sea level rise adjustment
  • Vernalis Adaptive Management Plan (VAMP) of the original scripts is not active\u00a0anymore
  • Source flows and intakes are added/modified
  • Method 'daily mapping' to process all boundaries/source inputs to daily-interval data
"},{"location":"reference/Scripts_and_input_files/#attachments","title":"Attachments:","text":"

bat_prep_orig.png (image/png) bat_prep_cwf.png (image/png) CWFtimeseries.JPG (image/jpeg) CWFcalsimfiles.JPG (image/jpeg) CWFdir.JPG (image/jpeg) bat_prep.png (image/png) prep_doc_bst.py (application/octet-stream) planning_ec_mtz_bst.py (application/octet-stream) planning_boundary_flow_bst.py (application/octet-stream) extend_calsim_outputs_bst.py (application/octet-stream) expand_seasonal_bst.py (application/octet-stream) dailymapping_051010.py (application/octet-stream) prep_ec_bst.py (application/octet-stream) prepro_BST_Existing.bat (application/octet-stream)

"},{"location":"reference/Supporting_Tools_and_QAQC/","title":"Supporting Tools and QAQC","text":"

Currently, all the preprocess scripts are written with DSM2-Vista. It's recommended to use the most updated version at https://github.com/CADWRDeltaModeling/dsm2-vista\u00a0(not the one included in DSM2 package). * Note to change the relevant environment variables.

1.

It is good practice to review and compare CalSIM outputs and its preprocessed results (*.dss)\u00a0before running DSM2. HEC-DSS and DSM2-Vista\u00a0are the most widely used tools.

http://www.hec.usace.army.mil/software/hec-dss/

2.

WRIMS' report tool is useful to compare\u00a0CalSIM outputs, i.e. DSM2 inputs\u00a0(timeseries\\CALSIM\\DV.dss)

https://www.water.ca.gov/Library/Modeling-and-Analysis/Modeling-Platforms/Water-Resource-Integrated-Modeling-System

*\u00a0Note to keep consistency in the\u00a0pathnames, time windows, etc.\u00a0between the comparison scenarios.

3.

Another good tool to compare between scenarios dss (in general) is\u00a0the scripts Compare DSS Tool in DSM2-Vista.

${vista}\\bin\\compare_dss.bat

or a simplified version to check consistency\u00a0${vista}\\bin\\compare_dss_files.bat

*\u00a0One good practice is to locate changes first (maybe the big ones), then use compare_dss.bat to specify and illustrate them.

4.

A quick way to check accuracy of preprocess is to compare Net Delta Flow (NDO) = inflows-outflow-CU

"},{"location":"reference/Supporting_Tools_and_QAQC/#attachments","title":"Attachments:","text":"

compareDSSfiles.JPG (image/jpeg) compareDSS.JPG (image/jpeg) wrimsReport2.JPG (image/jpeg) wrimsReport1.JPG (image/jpeg) wrimsReport.png (image/png)

"},{"location":"reference/Update_DSM2_historical_simulation/","title":"Update DSM2 historical simulation","text":""},{"location":"reference/Update_DSM2_historical_simulation/#introduction","title":"Introduction","text":"

The historical simulation of Delta Simulation Model II (DSM2) simulates the ground truth of Sacramento-San Joaquin Delta(Delta) hydrodynamics and water quality. It requires collecting the observed flows, stages, and water quality at the model boundaries and the actual gate operations as much as possible, converting them into the data format DSM2 recognizes, and conducting quality assurance and quality control (QA/QC). The missing inputs must be filled to make DSM2 run successfully. This document directs all the procedures of preparing the inputs for DSM2 historical simulation. All the required scripts and information for updating DSM2 historical simulation have been included in the package. The package\u00a0has been saved\u00a0in the shared\u00a0folder \\nasbdo\\Delta_Mod\\Share\\lanliang\\Update_DSM2_package. Some links might not be effective\u00a0on this page, but work well in the document Update DSM2 Historical Simulation.docx in the shared folder.

"},{"location":"reference/Update_DSM2_historical_simulation/#prerequisites","title":"Prerequisites","text":"

*1.\u00a0\u00a0\u00a0\u00a0\u00a0*\u00a0Follow the instructions here to add the option 'Open command window here' to the Windows Explorer context menu. You will need administrative privileges to do this, and you should only do this if you are comfortable modifying the registry in Windows 10. This will allow you to open a command window by right clicking on a folder in Windows explorer. DSM2 models and Python scripts can be run in the command window.

*2.\u00a0\u00a0\u00a0\u00a0\u00a0*HEC-DSSVue:\u00a0 Download and install HEC-DSSVue. It is essential for visualizing and examining data and converting data into the required formats.

3.\u00a0\u00a0\u00a0\u00a0\u00a0Vtools: Download and install Vtools. It is the tool used to process DSS data. Its functions are called by the Python scripts that update the DSM2 historical simulation.

*4.\u00a0\u00a0*\u00a0\u00a0Python: Download and install Python 2.7, the version compatible with Vtools, from the websites of Python or Anaconda.

"},{"location":"reference/Update_DSM2_historical_simulation/#the-schematic-of-updating-procedures","title":"The schematic of updating procedures","text":"

The schematic indicates the four major steps for updating the inputs for a DSM2 simulation of historical Delta hydrodynamics and electrical conductivity (EC). Each step consists of collecting part of data and conducting QA/QC. The technical specification of the procedure has been described below. Once the four steps for updating the DSM2 historical simulation\u00a0are conducted, the latest version of\u00a0the DSS files (DICU_YYYYMM.dss, histYYYYMM.dss, and gates-v8-YYYYMM.dss) are generated.\u00a0Copy these three DSS files in the DSM2 subfolder \\timeseries and the DSM2 historical simulation update is completed.\u00a0

"},{"location":"reference/Update_DSM2_historical_simulation/#step-1-dicu-update-under-the-folder-dicu","title":"Step 1: DICU Update (under the folder /DICU)","text":"

1.\u00a0\u00a0\u00a0\u00a0\u00a0 Retrieve CIMIS data

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Log in CIMIS with the user name wildej and password delta, and click DATA and then My Reports.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Under the list Quick Reports, click Execute monthly Report, List 1, and then the report is loaded in an Excel spreadsheet. This report includes the precipitation and reference ET and other climatic data at stations Davis(6), Brentwood(47), Manteca(70), Twitchell(140), Lodi West(166), Tracy(167). Right now, the data at Lodi West are missing on that spreadsheet.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Log off CIMIS and close the browser.

2.\u00a0\u00a0\u00a0\u00a0\u00a0 Prepare precipitation and evapotranspiration data in DICU-YYYYMM.xlsm

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Delete the second and third columns in the downloaded spreadsheet, and copy the data into DICU-YYYYMM.xlsm\u00a0on the CIMISData sheet with the same format.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Check data on PrecipLookupToDSS sheet are correctly linked to the data on CIMISData sheet. Sometimes, the data is not available in some months or some stations on CIMISData. When the downloaded spreadsheet is pasted on the CIMISData sheet, the precipitation and reference ET on CIMISData will be automatically copied to another sheet PrecipLookupToDSS. In order to make the data automatically transfer from CIMISData to PrecipLookupToDSS, the rows of the missing data in CIMISData have to be filled with blanks.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Extend the data on TimeSeries sheet, and keep the same formulas on each column. The long-term mean values in the columns, such as \u201cMean Evap-ET\u201d, \u201cDICU Ave Evap(mm)\u201d and \u201cHist Ave Evap\u201d, could repeat the same values as those in the previous year.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Save the spreadsheet. Do not close the file. It will be used for updating the precipitation and reference ET for the DICU model.

3.\u00a0\u00a0\u00a0\u00a0\u00a0 Update the input files in DICU model

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Go to DICU/Precip/7STATION-DAT-Y2K , open the Precip input file 7STAPREC.WY20XX. The file saves the monthly precipitation in the water year 20XX at seven stations: Davis, Rio Vista, Brentwood, Tracy, Stockton, Lodi and Galt. CIMIS has not collected the precipitation data at Galt for recent several decades. Update the precipitation with the same station names from the spreadsheet PrecipLookupToDSS, and copy the same precipitation at Lodi into the column of Galt.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Starting from WY2015, the Lodi West data from CIMIS is missing, so the Lodi precipitation after April 2015 is downloaded from\u00a0 the National Centers for Environmental Information, University of California Agriculture & Natural Resources (UCANR). Paste the downloaded Lodi precipitation onto the spreadsheet PrecipLookupToDSS.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Update the Precip file located at DICU/PRECIP/7STATION-DAT-Y2K . If some of the downloaded Lodi data are missing, the precipitation at Stockton can be taken as the substitute, since Stockton is the nearest available station to Lodi.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Save the Precip file. (If starting a new water year, make sure to copy the file for the next water year with the appropriate name of the next water year + 1 so the average precip information will not be lost!)

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Water year type. Go to the website to get the water year type, and go to the folder DICU/DICU5IN1/1922-20**(the current year to update), and update WYTYPES file with textpad.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Pan evaporation. Go to DICU/PAN_EVAP, update README-2YYY.txt file with the Manteca pan evap (in and mm), AVE EVAP of DICU_YYYYMM.xlsm and ET ADJ FACTR from the \u201cTimeSeries\u201d worksheet of DICU_YYYYMM.xlsm to keep a record of the data used.

- \u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Update DICU5.5 txt file. Go to DICU/DICU5/1922-20**, update DICU5.5 txt file at the bottom. You will want to update the water year type and the ET adjustment factors which are in rows where the values are from column in the \u201cTimeSeries\u201d worksheet of DICU_YYYYMM.xlsm. Also remember this is by water year and the data spacing must not change.

4.\u00a0\u00a0\u00a0\u00a0\u00a0 Run DICU

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Go to DICU/DICU5IN1/1922-20**, update dicu5in1.py. The lines marked with \u201cUpdate here!\u201d must be updated. Open the command window, and run:

python dicu5in1.py

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Go to DICU/DICU5/1922-20**, update and run the python script run-dicu5.py. The lines marked with \u201cupdate folder name\u201d must be updated. Open the command window, and run:

python run-dicu5.py

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Go to DICU/NODCU/NODCU12/1922-20**/, update and run the python script bat1922-20**.py. The lines marked with \u201cupdate folder name\u201d, \u201cupdate the year\u201d, and \u201cupdate the month\u201d must be updated. Open the command window, and run:

python bat1922-20**.py

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 \u00a0Copy the file DICU_YYYYMM.dss from /DICU/NODCU/NODCU12/1922-20**/ to the folder /timeseries of the DSM2 historical simulation.

"},{"location":"reference/Update_DSM2_historical_simulation/#step-2-dsm2-boundary-inputs-update-under-the-folder-dsm2_flow_stage_ec_input","title":"Step 2: DSM2 Boundary Inputs Update (under the folder /DSM2_flow_stage_EC_input)","text":"

1.\u00a0\u00a0\u00a0\u00a0\u00a0 Boundary inputs from CDEC

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Most DSM2 boundary data can be retrieved from CDEC. The python script Retrieve_data_fromCDEC.py can retrieve the data of CDEC stations as an input text file defines, and write the retrieved data into a DSS file.

The input text file is a queue of the information of the requested CDEC stations, and each line in the file contains the station ID, sensor number, and duration of one CDEC station.

The data in the DSS file is raw data that frequently contains gaps. To allow writing the timeseries into the DSS file without interruptions, they are defined as irregular timeseries.

Get a command prompt window under the folder /DSM2_flow_stage_EC_input, and type the line below to retrieve the DSM2 boundary data.

Python Retrieve_data_fromCDEC.py arg1 arg2 arg3 arg4

where

Arg1 \u2013 The first argument, the text file of the CDEC station information. To retrieve DSM2 boundaries, Arg1 is Delta_boundaries.txt. The DSM2 boundary flows, stages, and ECs, from CDEC are included in this text file. Table 1 lists the DSM2 boundaries, and their corresponding CDEC stations and related information, which the text file has included. CDEC does not have the DSM2 boundary, Mokelumne River inflow.

Arg2 \u2013 The second argument, the starting date of the data, formatted as mm/dd/yyyy.

Arg3 \u2013 The third argument, the ending date of the data, formatted as mm/dd/yyyy.

Arg4 \u2013 The fourth argument, the name of the DSS file that stores the retrieved data.

Below is an example to download the DSM2 inputs from 1/1/2017 through 12/31/2017 and write the retrieved data into a DSS file named update201712.dss.

Python Retrieve_data_fromCDEC.py Delta_boundaries.txt 1/1/2017 12/31/2017 update201712.dss

Table 1 The CDEC stations to retrieve data as the DSM2 inputs

CDEC station RKI name DSM2 input type Input location Time interval LIS BYOLO040 flow Yolo Bypass 1 DAY YBY BYOLO040 flow Yolo Bypass 1 DAY VNS RSAN112 flow San Joaquin River at Vernalis 1\u00a0DAY FPT RSAC155 flow Sacramento River at Freeport 1\u00a0DAY NHG RCAL009 flow Calaveras\u00a0River at Stockton 1\u00a0DAY MHB RCSM075 flow Cosumnes\u00a0River at Michigan Bar 1\u00a0DAY HRO CHSWP003 export Banks\u00a0pumping 1 DAY TRP CHDMC004 export Jones\u00a0(Tracy) pumping 1 DAY BKS SLBAR002 export North\u00a0Bay Aqueduct 1\u00a0DAY CCW CHVCT001 export Middle\u00a0River pumping 1\u00a0DAY IDB ROLD034 export Old\u00a0River pumping near Discovery Bay 1\u00a0DAY INB CHCCC006 export Rock\u00a0Slough pumping near Brentwood 1\u00a0DAY MRZ RSAC054 stage Martinez 15 MIN MRZ RSAC054 EC Martinez 1\u00a0HOUR SRH RSAC139 EC Sacramento\u00a0River at Hood 1\u00a0DAY VER RSAN112 EC Vernalis 1\u00a0DAY

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 QA/QC of all the data in the DSS file except Martinez stage and EC. Conduct QA/QC and remove errors for those timeseries. Martinez stage and EC will be processed independently after Step 2.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Use HEC-DSSVue to fill the data gaps. HEC-DSSVue can automatically fill the gaps with several time steps missing by interpolation when converting irregular to regular timeseries. There is another way to fill the gaps with longer intervals. Go to the menu of HEC-DSSVue, and click: Tools->Math Functions->General->Estimate missing values.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Use HEC-DSSVue to convert the irregular timeseries into regular timeseries. Go to the menu of HEC-DSSVue, and click: Tools->Math Functions->Time Functions->select operator: min/max/avg/\u2026over period -> select function type: average over period -> select new period intervals as Table 1 -> Compute->save with the default pathnames.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Copy and paste the file update201712.dss with regular timeseries into the folder /merge_data.

2.\u00a0\u00a0\u00a0\u00a0\u00a0 Retrieve ancillary data from CDEC

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Call the same Python script Retrieve_data_fromCDEC.py to retrieve extra data for QA/QC, filling data gaps at Martinez, and checking the gates operation times. The CDEC stations for fulfilling those functions are listed in the text file, ancillary_stations.txt. Below is an example to download the ancillary data from 1/1/2017 through 12/31/2017 and write the retrieved data into a DSS file named as ancillary201712.dss.

Python Retrieve_data_fromCDEC.py ancillary_stations.txt 1/1/2017 12/31/2017 ancillary201712.dss

*3.\u00a0\u00a0\u00a0\u00a0*Download data from other sources

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 San Francisco stage for updating Martinez stage

Go to the website: http://tidesandcurrents.noaa.gov/waterlevels.html?id=9414290

Choose the options:

Units: Feet

Timezone: LST

Datum: MLLW

Interval: Hourly

Update: Data Only

When the data list shows on the screen, click the button: Export to CSV, to save the data in a csv file. Load the csv data in HEC-DSSVue, and convert its datum from MLLW to NGVD by NGVD = MLLW - 2.64 feet

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Yolo Bypass Flow

Yolo Bypass is a wide-open area, so it is hard to investigate the actual flows in this region. There is no flow station at the DSM2 boundary location. As a boundary flow in DSM2, Yolo Bypass flow has been assumed to equal the aggregation of the flows collected from those stations near Yolo Bypass, like the Yolo Bypass flow (QYOLO) from DAYFLOW as

QYOLO = Yolo Bypass flow at Woodland + Sacramento Weir Spill +\u00a0South Fork Putah Creek flow

These three flows can be retrieved from CDEC station YBY, USGS station 11426000(SACRAMENTO WEIR SPILL TO YOLO BYPASS NR SAC CA), and CDEC station PUT.\u00a0 The last two stations have been inactive for recent years, so YBY is the unique effective station collecting the Yolo Bypass flow.

However, DSM2 v6.0 historical simulation update tool took the flow at CDEC station RUM (Cache Creek at Rumsey Bridge) as the Yolo Bypass flow. It is not appropriate, especially the inflow to the Sacramento River during summers. Generally, during summers and falls Yolo Bypass has toe drain instead of inflow to the Sacramento River, and during winters and springs it functions as a diversion to reduce the Sacramento River floods.

DSM2 input data version is based on the timeseries ending time. Starting from the version December 2017 (12/2017), the RUM flow from 2006 through current has been replaced by the available observed flows at YBY and LIS. The old input data versions keep the RUM flow.

Furthermore, from the version 12/2017, CDEC station LIS flow is taken as the Yolo Bypass flow from June to November every year, while CDEC station YBY flow is taken as the Yolo Bypass flow from December to next May. If the Sacramento River floods diverted into Yolo Bypass come earlier than December or after May, YBY flow might be accounted as the Yolo Bypass flow of the DSM2 inputs in those months.

The combination of YBY and LIS flows in the file update201712.dss under the folder /DSM2_flow_stage_EC_input is conducted in HEC-DSSVue. Open update201712.dss in HEC-DSSVue, set the time window December 1st -May 31st, select the daily YBY flow, duplicate the daily YBY flow and rename it as /CDEC/LIS/FLOW//1DAY/20_E/. It overwrites the daily LIS flow from December through May. After that, the daily LIS flow represents Yolo Bypass flow and will be merged with the previous version of DSM2 inputs.\u00a0

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Mokelumne River flow

Contact the staff in East Bay Municipal Utility District (EBMUD) directly. Their website only presents the flow at Mokelumne River below WID for the last seven days:

http://www.ebmud.com/water-and-drought/about-your-water/water-supply/water-supply-reports/daily-water-supply-report/

Kevin Fung kevin.fung@ebmud.com has been contacted for the past several years and provided the raw data of the current year.

The QA/QC\u2019d Mokelumne River flow of the previous years, which EBMUD sent to USGS, can be downloaded from USGS website.

Once Mokelumne River flow is received, use HEC-DSSVue to load and save it in the DSS file /DSM2_flow_stage_EC_input/updateYYYYMM.dss. Here MM and YY/YYYY are the month and year of the updated version, and set the pathname of this time series as

/FILL+CHAN/RMIL070/FLOW//1DAY/DWR-DMS-YYYYMM/

"},{"location":"reference/Update_DSM2_historical_simulation/#step-3-martinez-boundaries-update","title":"Step 3: Martinez boundaries Update","text":"

1.\u00a0\u00a0\u00a0\u00a0\u00a0 Martinez Stage (DATUM: NAVD88; under the folder /MTZ_stage_EC/fill_stage)

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Remove stage data errors. Copy Martinez CDEC stage from\u00a0/DSM2_flow_stage_EC_inputs/update201712.dss to a new DSS file, for example MTZ_201712.dss, under the folder /MTZ_stage_EC/fill_stage. Check the data in the file MTZ_201712.dss, remove errors in the stage timeseries, record the time window of each data gap at the end of the text file input.txt, and delete the out-of-date time windows in the file input.txt.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 One hour shift of Daylight saving time. Compare the astronomical tide and CDEC retrieved stage at Martinez, select the time window of one hour shift due to Daylight saving time, and shift one hour to match the phases of astronomical tides. The time shift can be conducted in HEC-DSSVue under the menu Tools -> Math Functions ->Time Functions -> Operator: Shift in Time. Record the data gaps in March and November at the end of input.txt because of the time shift.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Prepare the input file. Prepare the input text file (input.txt) for the Python script (fillgaps.py) to fill the stage gaps. Below is an example of the file input.txt. It includes the data version (tmark), names of input and output DSS files, the pathnames of timeseries used in this gap filling, and the time windows of the stage data gaps found. The time windows are suggested to be longer than one day or two days to fill the gaps smoothly.\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Run the python script:

Python fillgaps.py input.txt

2.\u00a0\u00a0\u00a0\u00a0\u00a0 Martinez EC (under the folder /MTZ_stage_EC /fill_EC)

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Remove the errors in the observed 15-minute EC data. Copy Martinez 15-minute EC data from /DSM2_flow_stage_EC_inputs/update201712.dss to a new DSS file, for example MTZ_201712.dss, under the folder /MTZ_stage_EC/fill_EC. Open MTZ_201712.dss, find and remove the errors in the Martinez 15-minute EC data, and record the data gaps in the text files .\\timewindows_ec_ave.txt.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Average the filtered 15min MRZ EC to hourly MRZ EC and save part F of the pathname as /100_E_AVE/. Then the hourly MRZ EC has the pathname as

/CDEC/MRZ/EC//1HOUR/100_E_AVE/

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Copy Mallard hourly and daily EC data from /DSM2_flow_stage_EC_inputs/ ancillary201712.dss to the same DSS file /MTZ_stage_EC/fill_EC/MTZ_201712.dss.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Calculate NDOI by combining 6 inflows, 6 exports and Delta consumptive use.

1)\u00a0\u00a0\u00a0\u00a0\u00a0 In the file /DSM2_flow_stage_EC_input/updateYYYYMM.dss, use HEC-DSSVue to sum up the six inflows into one total inflow with the pathname

Path1: /CDEC/TOTAL/FLOW//1DAY/20_H/.

The pathnames of the inflows are listed below,

Sacramento inflow: \u00a0\u00a0/CDEC/FPT/FLOW//1DAY/20_H/

San Joaquin inflow:\u00a0 \u00a0/CDEC/VNS/FLOW//1DAY/20_E/

Yolo Bypass inflow: \u00a0\u00a0/CDEC/LIS/FLOW//1DAY/20_E/

Calaveras inflow:\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 /CDEC/NHG/FLOW//1DAY/23_H/

Cosumnes inflow: \u00a0\u00a0\u00a0\u00a0\u00a0/CDEC/MHB/FLOW//1DAY/20_H/

Mokelumne inflow:\u00a0 \u00a0/FILL+CHAN/RMIL070/FLOW//1DAY/DWR-DMS-YYYYMM/

2)\u00a0\u00a0\u00a0\u00a0\u00a0 Also sum up the six exports into one total export with the pathname

Path2: /CDEC/TOTAL/FLOW_EXPORT//1DAY/70_D/.

The pathnames of the six exports are listed below,

SWP:\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0/CDEC/HRO/FLOW_EXPORT//1DAY/70_D/

CVP:\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 /CDEC/TRP/FLOW_EXPORT//1DAY/70_D/

North Bay aqueduct:\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0/CDEC/BKS/EXPORT//1DAY/70_D/

Old River near Brentwood:\u00a0\u00a0\u00a0\u00a0 /CDEC/INB/FLOW_EXPORT//1DAY/70_D/

Old River near discovery bay: /CDEC/IDB/FLOW_EXPORT//1DAY/70_D/

Middle River:\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0/CDEC/CCW/FLOW_EXPORT//1DAY/70_D/

3)\u00a0\u00a0\u00a0\u00a0\u00a0 Calculate the total inflow Path1 minus the total export Path2, and save it as the timeseries with the pathname

Path3: /CDEC/TOTAL/FLOW //1DAY/INFLOWS-EXPORTS/

4)\u00a0\u00a0\u00a0\u00a0\u00a0 Copy the timeseries with Path3 to the DSS file /MTZ_stage_EC/fill_EC/MTZ_201712.dss

5)\u00a0\u00a0\u00a0\u00a0\u00a0 Copy the latest version of dicu_YYYYMM.dss from the folder /DICU/NODCU/NODCU12/1922-2017 to the folder /MTZ_stage_EC/fill_EC.

6)\u00a0\u00a0\u00a0\u00a0\u00a0 Use HEC-DSSVue to open file /MTZ_stage_EC/fill_EC /dicu_YYYYMM.dss, and sum up all the timeseries with part C DIV-FLOW to one timeseries with

Path 4: /DICU-HIST+NODE/TOTAL/DIV-FLOW//1MON/DWR-BDO/

7)\u00a0\u00a0\u00a0\u00a0\u00a0 Sum up all the timeseries with part C DRAIN-FLOW to one timeseries with

Path 5: /DICU-HIST+NODE/TOTAL/DRAIN-FLOW//1MON/DWR-BDO/

8)\u00a0\u00a0\u00a0\u00a0\u00a0 Sum up all the timeseries with part C SEEP-FLOW to one timeseries with

Path 6: /DICU-HIST+NODE/TOTAL/SEEP-FLOW//1MON/DWR-BDO/

9)\u00a0\u00a0\u00a0\u00a0\u00a0 Calculate Delta consumptive use by Path 4+Path6-Path5, and save it as one timeseries with the pathname

Path 7: /DICU-HIST+NODE/TOTAL/FLOW//1MON/DWR-BDO/

10)\u00a0 Convert the timeseries with path 7 into daily data,

Path 8: /DICU-HIST+NODE/TOTAL/FLOW//1DAY/DWR-BDO/

and copy the daily one to the DSS file MTZ_stage_EC/fill_EC/MTZ_201712.dss

11)\u00a0 Open file MTZ_201712.dss, and calculate NDOI by subtracting the timeseries with Path8 from that with Path 3. The NDOI data has the pathname

Path 9: /FILL+CHAN/NDOI/FLOW//1DAY/DWR-DMS-YYYYMM/

Here YYYY and MM are the year and month of the updated version.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Prepare the input file. Prepare the input text file, such as timewindows_ec_ave.txt, for the python script (fillgaps_ec.py) to fill the EC gaps. Below is an example of the file timewindows_ec_ave.txt. It includes the data version(tmark), names of input and output DSS files, and the time windows of the data gaps found.\u00a0\u00a0\u00a0

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Run the Python script, and obtain the output file filled.dss.

Python fillgaps_ec.py timewindows_ec_ave.txt

3.\u00a0\u00a0\u00a0\u00a0\u00a0 Merge the updated data and the previous version of DSM2 input data

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 After the data gaps have been filled, copy QA/QC\u2019d Martinez 15-minute stage and 1-hour EC from the folders /MTZ_stage_EC/fill_EC and /MTZ_stage_EC/fill_stage into the file \\DSM2_flow_stage_EC_inputs\\updateYYYYMM.dss.

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 When all the input timeseries have been QA/QC\u2019d, the previous version of DSM2 input data must be extended with the updated data in the file \DSM2_flow_stage_EC_inputs\updateYYYYMM.dss. Copy the previous version of DSM2 input data, histMMYY.dss, into the folder \merge_data, update the names of merged files, the name of the output file, and the versions in the file pathnames.txt, and run the Python script:

Python merge_data.py pathnames.txt

-\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Check all the pathnames in the latest version match those in DSM2 inp files.

"},{"location":"reference/Update_DSM2_historical_simulation/#step-4-gate-operations-update-under-the-folder-gateoperations","title":"Step 4: Gate operations update (under the folder \\gateoperations)","text":"

1.\u00a0\u00a0\u00a0\u00a0\u00a0 Delta Cross Channel

Download the gate operations from https://www.usbr.gov/mp/cvo/vungvari/Ccgates.pdf

2.\u00a0\u00a0\u00a0\u00a0\u00a0 Clifton Court gate

Ask Liu, Siqing (Siqing.Liu@water.ca.gov) from O&M. O&M collects the inputs without QA/QC and updates DSM2 monthly, so their inputs can be taken as the preliminary inputs.

3.\u00a0\u00a0\u00a0\u00a0\u00a0 South Delta temporary barriers and Montezuma Slough gate

Go to http://baydeltaoffice.water.ca.gov/sdb/tbp/web_pg/tempbsch.cfm, or ask Michal Burn, South Delta Section to get the temporary barriers gate operations, or ask O&M. The available values of the gate parameters are listed in the file \\gateoperations\\barriers_values_03082012.xlsx. The Vertical Datum in the Excel file is NGVD29, while that in the gate operation DSS file is NAVD88. When the DSS file is updated, the Datum difference must be counted.

4.\u00a0\u00a0\u00a0\u00a0\u00a0 Tune the gate operation times

All the information of gate operations collected above are added in \\gateoperations\\gate-v8-YYYYMM.dss. The collected gate operation schedules are normally not the actual gate operations. After all the inputs of DSM2 historical simulation are prepared, pre-run DSM2 HYDRO and check if the simulated upstream and downstream stages/flows of each gate match the observed stages/flows. If not, tune the gate operation times until the simulated stage variations in time reflect the gate operation schedules accurately. Table 2 is the list of CDEC stations to check the gate operation schedules. CDEC stages or flows in the table have been downloaded and saved in the file ancillary201712.dss in Step 2.\u00a0\u00a0

Table 2 The CDEC stations to check gate operations

Barriers RKI\u00a0or Channel No CDEC station Old River @ Head Upstream Mossdale MSD Downstream Channel\u00a055, ROLD074 \u00a0OH1 Old\u00a0River at Tracy Upstream ROLD047 OAD Downstream ROLD046 OBD Grant\u00a0Line Canal Upstream Channel 205 DGL Downstream CHGRL009 GLC Middle\u00a0River Upstream RMID027 MTB Downstream Channel 135 No\u00a0station Delta\u00a0Cross Channel Downstream Channel\u00a0365 DLC Montezuma\u00a0Slough Upstream Collinsville\u00a0at Sac. River CSE Downstream Roaring\u00a0River MSL"},{"location":"reference/Update_DSM2_historical_simulation/#attachments","title":"Attachments:","text":"

plots_for_chapters.png (image/png) MRZ_EC_input.png (image/png) sample_file_1.png (image/png)

"},{"location":"reference/Versions/","title":"Versions","text":"

If grid/arc is changed (move/split/merge) at any boundaries of the Delta,\u00a0 CALSIM \u2192 DSM2 preprocess scripts (and DSM2 configuration)\u00a0should change, i.e.\u00a0a different version. Some typical keywords are usually used as part of the scenario version name:

  • Existing (EST) as the current Delta condition (could be different as time goes), No Action Alternative (NAA) as future scenarios without major grid change, Proposed Alternative (PA) as future scenarios with any major grid change (construction, etc)
  • Level of development (LOD, 2005, 2030 etc) represents land use info, etc. Since censuses are not usually conducted very frequently, different scenarios could use the same LOD.
  • Sea level rise projection (SLR, 15cm, 45cm etc) represent climate change scenarios.

California Water Fix settings of Calsim and DSM2 (page 70, Table B-8) could be referred as an example.

http://baydeltaconservationplan.com/Libraries/Dynamic_Document_Library/Final_EIR-EIS_Appendix_5A_-CALSIM_II_and_DSM2_Modeling_Simulations_and_Assumptions_Appendix-_Section_B.sflb.ashx

Other than the above, operational changes\u00a0or constraints of standards usually only affect CalSIM results, not DSM2 settings. Thus the DSM2 preprocess and configuration don't need changes, i.e. we can just drop in the results. A practical routine is to use the same file name/modifier/path for various scenarios within 1 version category (modify folder name, or use unique name for the post-process).

Some commonly used versions in the office are listed below.

  • Original scripts package in DSM2/scripts or vista/scripts/dsm2. This is the original version.
  • CH2M helped prepare CWF-related EST, NAA,\u00a0PA, in combinations of LOD and SLR. The earlier 2\u00a0are widely used as templates in recent years.
  • SWP Delivery Capability Report (DCR), Water Storage Investment Program (WSIP)
  • Most widely used versions are in DSM2 v806 for now. Effort has been made to update it to DSM2 v812 with 1 practice for EST. (Annual Report 2017 Chapter 1)
  • One recent related practice is a new version for\u00a0CALSIM3 to DSM2 in SWP Fingerprinting study.
"},{"location":"tutorials/An_Introduction_to_DSM2_Tutorials/","title":"An Introduction to DSM2 Tutorials","text":"

DSM2 Website

Official Website

Documentation

If DSM2 is installed on your computer, click on the START menu and select Programs \u2192 DSM2_v8 \u2192 DSM2_documentation

"},{"location":"tutorials/An_Introduction_to_DSM2_Tutorials/#introduction","title":"Introduction","text":"

Welcome to the Delta Simulation Model 2 (DSM2) Version 8 tutorial.

The tutorial is divided into two sets of lessons. The first set teaches basic DSM2 skills using simplified channels. The second set of tutorials explores more advanced DSM2 skills using the model application to the Sacramento-San Joaquin Delta. The input files for these tutorials are in the tutorial\\simple and tutorial\\historical directories respectively.

The goal of the beginning tutorials (BasicTutorials 1-6, see Figure 1) is to familiarize you with the DSM2 input system and fundamental modeling capabilities. This six-part tutorial builds a model of a simple channel system, with each part building in complexity from its predecessor. It is recommended that the tutorials be completed in order, but it is not necessary since the tutorials are self contained.

Figure 1: DSM2 Basic Tutorials

The goal of the Delta tutorials (DeltaTutorials 1-5, see Figure 2) is to familiarize you with Delta specific DSM2 applications and tasks. In addition a DSM2 Overview document has been provided that describes the DSM2 modules (HYDRO, QUAL, and PTM) and their typical modes of application (historical, real-time and planning).

Figure 2: DSM2 Delta Tutorials

{DSM2_home}

In working the tutorials, the directory where you installed DSM2 will be referred to as {DSM2_home}. E.g., if you accepted the default install directory, {DSM2_home} would be d:\\delta\\dsm2 (there may also be a version number in the directory name).

The first tutorial is called Channels, and involves setting up the channel grid, adding parameters, setting boundary conditions, and listing output locations. The second tutorial is called Reservoir Gate Transfer, and involves adding these components to the simple channel system.

The third tutorial is called Layering. The section guides you through the nuances of organizing data in multiple files. Layers are part of the DSM2 data management system. They allow input items to be grouped in logical bundles, and allow changes to be brought into an old simulation without erasing or altering archived items.

The fourth tutorial is called Timevar, and demonstrates the addition of time-varying information to the model. In the previous sections, all boundary conditions and gate timings were set as constant, and no input files were needed. In this section, the model is set to read time-varying information stored in DSS files.

The fifth tutorial is called Output, and covers advanced output options. The first part involves modifications to the text input file, hydro.inp. The second part describes the use of groups and source tracking in QUAL.

The sixth tutorial is called Oprule, and covers the use of Operating Rule Language (ORL) statements to set gate operations. In the previous versions of DSM2, the input text and time series files had to explicitly state the operations of gates. With the operating rules, expressions can be used to make the model operate gates on-the-fly. E.g., a gate can be directed to automatically close when salinity conditions reach a certain threshold.

There are two icons that are used to highlight information in the tutorials. Indicates a DSM2 \"gotcha\" moment in which extra care may be necessary. Indicates a question to put your new found DSM2 knowledge to the test.

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_1_Historical_Simulation/","title":"DSM2 Bay-Delta Tutorial 1: Historical Simulation","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_1_Historical_Simulation/#purpose","title":"Purpose:","text":"

This tutorial will demonstrate how to launch a basic run of the historical HYDRO and QUAL simulations. You will also get practice using the study templates that are distributed with DSM2, see how the configuration file is used, make some changes in the output and learn about the post-processing \"transfer\" script for averaging your output.

Except as part of a re-calibration, it is rare to make big changes in the historical simulation. More commonly, you will want to add a few output locations or scalars. Large scale policy or physical changes are usually analyzed within a Planning simulation framework, covered in a later tutorial.

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_1_Historical_Simulation/#hydro-and-qual","title":"HYDRO and QUAL","text":"
  1. Copy the historical template:
    1. In windows, copy the folder\u00a0\\{DSM2_home}\\study_template\\historical\u00a0to the tutorial directory, after creating\u00a0\\{DSM2_home}\\tutorials\\historical. If there is already a historical folder, just copy the contents.
    2. Open\u00a0historical_hydro.inp\u00a0and\u00a0historical_qual_ec.inp.\u00a0Note the CONFIGURATION sections of both reference a file\u00a0configuration_historical.inp. By containing variables such as run dates in this file, you can more easily synchronize the models.
    3. Examine the\u00a0common_input\u00a0directory. By looking at\u00a0historical_hydro.inp, configuration_historical.inp\u00a0and the other main input files, you will see that many of the included files for the models are in the directory ${DSM2INPUTDIR}. In this distribution, this variable points to\u00a0/dsm2/common_input\u00a0\u2013 a repository in which all the distributed DSM2 input files are housed. Later, you may want to copy the input files locally and repoint ${DSM2INPUTDIR} to this local directory. In fact, there are tools to help with this. Regardless of whether you copy them, please resist changing the files directly \u2013 it is much easier to diagnose problems if you make your changes in the main file (historical_hydro.inp, historical_qual_ec.inp\u2026) or in a new file of your own making.
  2. Modify the Run Times in the Configuration File:

In the configuration file, set the runtime definitions as follows.

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_1_Historical_Simulation/#runtime","title":"runtime","text":"

START_DATE 01JUL1996 START_TIME 0000 QUAL_START_DATE 02JUL1996 PTM_START_DATE ${QUAL_START_DATE} END_DATE 01SEP1996 END_TIME 0000\u00a0

  1. Note the Output Step in HYDRO:

If you look in the channel output files (e.g.\u00a0output_channel_std_hydro_rki_20090715.inp), you will find that the time step of the output is itself an ENVVAR definition called ${FINE_OUT}. This is usually defined as 15 minutes in configuration file. Although DSM2 v8 will perform daily averages, it is recommended that you use the finer output and aggregate as a postprocessing step (we will cover this shortly).

  1. Add some Output

In historical_hydro.inp, add a block containing an extra flow output for Old River at Head. Notice that the name in this case is a \"practical\" name. Although you may sometimes add input with names like \"ch56_0\", such a name is redundant with the other information in the line, is difficult for non-modelers to understand and causes confusion if the grid numbering changes.

OUTPUT_CHANNEL NAME CHAN_NO DISTANCE VARIABLE INTERVAL PERIOD_OP FILE\u00a0 oldr_head 56 0 flow ${FINE_OUT} inst ${HYDROOUTDSSFILE}\u00a0 END\u00a0

  1. Run HYDRO and QUAL:

    1. In Windows Explorer, navigate to the directory, _\\{DSM2_home}\\tutorial_
    2. Right-click on the\u00a0historical\u00a0directory, and select,\u00a0Open Command Window Here.
    3. In the command window, type:\u00a0hydro historical_hydro.inp
    4. Wait for HYDRO to complete its runs.
    5. Now type:\u00a0qual historical_qual_ec.inp
  2. Aggregate the Output

Above we recommended that you use post-processing to aggregate your output. Let's see how this works. At a command prompt in the ${study}/output directory, type: > transfer -\u2013help This command should give you the options for the \"transfer.py\" script that will help you aggregate your output.\u00a0 For instance, if you want to create a daily average of all your flow output, type (this is all one line):\u00a0 >transfer --out=postpro.dss --selection=///FLOW////\u00a0 --transform=period_ave --interval=1DAY historical.dss\u00a0 As another example, you may want to take a Godin average of all the stage output and put it in the same file:\u00a0 >transfer --out=postpro.dss --selection=///STAGE////\u00a0 --transform=godin historical.dss\u00a0 You can similarly do monthly averages by making the interval 1MON and you can \"slice\" in time by specifying a time window (the syntax is given by the help command: > transfer -\u2013help\u00a0

  1. Running QUAL with Volumetric fingerprinting:

    1. In the command window, type:\u00a0qual historical_qual_vol.inp.
    2. Open the qual echo file qual_vol_echo_historical.inp in the output subfolder.
    3. Open the results file in the output subfolder, and examine the results.
  2. Running QUAL with Nonconservative Constituents fingerprinting:

    1. In Windows Explorer, navigate to the directory,\u00a0\\{DSM2_home}\\study_template_ _historical_qual_do\\ Conduct a similar study as EC and VOL.
    2. Notice that the running time period is 1996-2000, since Stockton effluent is not using 'constant'\u00a0but detailed timeseries:\u00a0effluentflow96-2000.dss

ENVVAR NAME VALUE\u00a0 STOCKTON_FLOW_FILE ${TSINPUTDIR}/effluentflow96-2000.dss # needed for DO runs, if not available use constant END

    1. Open the results file in the output subfolder, and examine the results.
"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_1_Historical_Simulation/#particle-tracking-modeling-ptm","title":"Particle Tracking Modeling (PTM)","text":"
  1. Run PTM in Delta Grid under Historical Condition

    1. In Windows Explorer, navigate to the directory, _

      Unknown macro: {DSM2_home}{_}tutorial\\ in the command window, type:\u00a0ptm historical_ptm.inp. *If necessary, reduce the running time period by modifying\u00a0END_DATE\u00a0in\u00a0configuration_historical.inp. 2. Open the ptm echo file ptm_echo_historical.inp in the output subfolder and examine the contents. 3. Open the ptmout.dss file in the output subfolder, and examine the results. Do a little mass balance to see if the particle fluxes add up.

  2. Repeat with Particle Filter on Channel Turned on:

Set particle filter at Head of Old River\u00a0

    1. In historical_ptm.inp, create the table for particle filter, with constant closing operation.

PARTICLE_FILTER NAME NODE AT_WB FILLIN FILE PATH Filter_HOR 8 chan:54 last constant 0 END\u00a0

    1. Add the related output, like

PARTICLE_FLUX_OUTPUT NAME FROM_WB TO_WB INTERVAL FILE SJR-OLD chan:7 chan:54 15min ${PTMOUTPUTFILE} END\u00a0

    1. Open the ptmout.dss file in the output subfolder, and examine the results
  1. Repeat with Particle Filter on Reservoir Turned on:

With particle filter installed at Clifton Court Forebay (this is a special version of filter dealing with source flows directly connecting to reservoir)\u00a0

    1. In historical_ptm.inp, create the table for particle filter, with time-varying operation control, specified in DSS file.

PARTICLE_RES_FILTER NAME RES_NAME AT_WB FILLIN FILE PATH clfc_div_bbid clifton_court qext:dicu_div_bbid last ./filterOp.dss /HIST+FILTER/CLFC_DIV/FILTER_OP//IR-DECADE/DWR-BDO/ END\u00a0

    1. Add the related output, like

PARTICLE_FLUX_OUTPUT NAME FROM_WB TO_WB INTERVAL FILE SWP-AG res:clifton_court group:bbid 15min ${PTMOUTPUTFILE}\u00a0 END\u00a0

    1. Open the ptmout.dss file in the output subfolder, and examine the results
  1. Repeat with Particle Filter on Source Flow Turned on:

Agriculture source flow (diversions and seepages) could be required to restrict particles from entering in simulations. It is one application for particle filter.\u00a0

    1. In Windows Explorer, navigate to the directory, \\{DSM2_home}\\tutorial\\ Open the file\u00a0delta_dicu_filter_closed.txt. Copy the content into historical_ptm.inp
    1. Open the ptmout.dss file in the output subfolder, and examine the results
"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_1_Historical_Simulation/#making-animation-of-particle-tracking-modeling-ptm","title":"Making animation of Particle Tracking Modeling (PTM)","text":"
  1. Modify the PTM input file to make text output and to turn on the dispersion parameters:
    1. In Windows Explorer, copy the folder\u00a0ptm_animate\u00a0(with subfolders) from\u00a0\\{DSM2_home}\\study_templates\\ptm_animate

to the study directory, creating: \\{DSM2_home}\\tutorials\\historical\\ptm_animate

    1. With the PTM, it is useful to be able to switch easily between text and dss output formats \u2013 note that the animator requires text files. The\u00a0configuration_historical.inp\u00a0file is structured so that we can swap the environmental variable\u00a0PTMOUTPUTFILE. We are going to point\u00a0PTMOUTPUTFILE\u00a0to txt format so we can use the animator. 1. 1. Locate the\u00a0PTMOUTPUTFILE\u00a0at the end of the file, and modify as:

PTMOUTPUTFILE ptmout.txt

    1. Open the file,\u00a0historical_ptm.inp. 1. Locate the SCALARS section. Check all of the dispersion parameters to be\u00a0t.

ptm_ivert\u00a0t\u00a0# Use Vertical velocity profile ptm_itrans\u00a0t\u00a0# Use Transverse velocity profile ptm_iey\u00a0t\u00a0# Use transverse mixing ptm_iez\u00a0t\u00a0# Use vertical mixing

      1. Make sure the\u00a0anim_db.bin\u00a0line is turned on (this is usually commented out to save much running time)

ptm anim out 15min ${DSM2OUTPUTDIR}/anim_db.bin\u00a0

  1. Run PTM:
    1. In the command window, type:\u00a0ptm historical_ptm.inp.
    2. In Windows Explorer:
      1. Navigate to the directory,

\\{DSM2_home}\\tutorials\\historical\\output

      1. Examine the output in the\u00a0ptmout.txt\u00a0file. 2. Copy the files,\u00a0anim_db.bin\u00a0and\u00a0ptmout.txt. 3. Navigate to the directory,

\\{DSM2_home}\\tutorials\\historical\\ptm-animate\\dual\\left_panel

      1. Paste the files in the\u00a0left_panel\u00a0directory.
  1. Repeat with Dispersions Parameters Turned Off:

    1. In Windows Explorer, navigate to the directory, _\\{DSM2_home}\\tutorials\\historical_
    2. Open the file,\u00a0historical_ptm.inp.
      1. Locate the SCALARS section.
      2. Change all of the dispersion parameters from\u00a0t\u00a0to\u00a0f.

ptm_ivert\u00a0f\u00a0# Use Vertical velocity profile ptm_itrans\u00a0f\u00a0# Use Transverse velocity profile ptm_iey\u00a0f\u00a0# Use transverse mixing ptm_iez\u00a0f\u00a0# Use vertical mixing

    1. In the command window, type:\u00a0ptm historical_ptm.inp.
    2. In Windows Explorer:
      1. Navigate to the directory,

\\{DSM2_home}\\tutorials\\historical\\output

      1. Copy the files,\u00a0anim_db.bin\u00a0and\u00a0ptmout.txt. 2. Navigate to the directory,

\\{DSM2_home}\\tutorials\\historical\\ptm-animate\\dual\\right_panel

      1. Paste the files in the\u00a0right_panel\u00a0directory. 2. Navigate to the directory,

\\{DSM2_home}\\tutorials\\historical\\ptm-animate

      1. Double-click on\u00a0dual.bat\u00a0to open the animator. 2. Press start to start the animator and use the controls to adjust the speed.
  1. Modifying the Animator Display:

    1. The\u00a0left_panel\u00a0and\u00a0right_panel\u00a0directories contain files needed for operation:
      1. Modify the data path names:\u00a0fluxInfoDB.data\u00a0stores file and path information for the PTM output (the flux output in the text file is labeled with DSS-like path names). The listings in this file will be turned into the small flux bar graphs you see in the animator. The integer you see above the file name is an internal node ID, which is how you assign locations in the animator (also see\u00a0network.dat\u00a0below). Also, an output file of the PTM version 8 contains a minor version number. So the user may need to modify the data path names in the\u00a0fluxInfoDB.data\u00a0according to corresponding path names in an output file,\u00a0ptmout.txt\u00a0in this example.
      2. labelsDB.data\u00a0stores label information. You list labels and their location (using nodes, see\u00a0network.dat\u00a0below)
      3. network.dat\u00a0stores internal\u00a0x-\u00a0and\u00a0y-locations for nodes and channels. Pseudo-nodes are used for labels and other annotations as noted above. Please note that the nodes that are used in\u00a0network.dat\u00a0are internal node numbers, not external. (This makes the file very hard to edit, a point that will probably be addressed in the future). If you want a mapping of external-to-internal numbers, look at your echoed hydro output file (*.out or *.hof).
    2. Examine these files and the labels in them. Change the labels to something creative and reopen the animator.
"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_2_Source_Tracking_Fingerprinting_/","title":"DSM2 Bay-Delta Tutorial 2: Source Tracking (Fingerprinting)","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_2_Source_Tracking_Fingerprinting_/#purpose-the-purpose-of-this-tutorial-is-to-use-the-source-tracking-capabilities-of-the-model-to-create-a-fingerprinting-study-we-will-set-up-both-volumetric-and-concentration-based-fingerprinting-and-visualize-the-results","title":"Purpose: The purpose of this tutorial is to use the source tracking capabilities of the model to create a fingerprinting study. We will set up both volumetric and concentration-based fingerprinting and visualize the results.","text":"
  1. Reopen the historical tutorial

    1. In windows, navigate to \\{DSM2_home}\\tutorial\\historical. (folders and files are copied as described in the Delta tutorial 1)
  2. Create a model for source tracking:

In the background, source tracking imposes a computational cost on QUAL that is the same as one additional constituent per source. For this reason, it is useful to comment out source tracking as a standard course of running DSM2. But when you desire source tracking, you can uncomment it as follows:

    1. In historical_qual_ec.inp, locate the GROUPS include section.
    2. Uncomment the group definitions for source tracking (delete the # sign at the start of the line). You may wish to review the referenced file to see how the groups are identified.
    3. Similarly uncomment the two fingerprinting files \u2013 the ones that have \"source_track\" in their names.
  1. Define volumetric inputs

    1. Create the QUAL volumetric input file. Copy historical_qual_ec.inp and rename as historical_qual_vol.inp.
    2. Modify the concentration blocks. Go through each of the node and reservoir concentration files for QUAL ec. Modify the constituent (variable) to unit, value (FILE) to constant, (PATH) to 100. This step is conceptually simple, but will produce a large file \u2013 feel free to break it into several files if you prefer. If you are using Notepad++, you may want to use its column delete/copying features (press alt while you make your selection).
    3. Compare what you produced to the existing files in common_input that have \"volumetric\" in their names (node and reservoir concentration). Are they the same input? How could you test this using the echoed output?
  2. Define the fingerprinting output

    1. Specify Clifton Court concentration output for each of the source groups defined in the previous step, for both constituents: ec and unit, in block OUTPUT_RESERVOIR_SOURCE_TRACK. The name should be clifton_court, the concentration (variable) should be ec or volume and the interval should be 1day. Avoid redundancy or use of the source in the output name: i.e. use \"clifton_court\" for the name, not \"clifton_ag\" or \"clifton_ec\" . Because the source information is recorded in the F part of output dss file.
    2. Similar specification could be defined for channel source track in block OUTPUT_CHANNEL_SOURCE_TRACK. Pick any channel you are interested and do the definition.
  3. Run HYDRO and QUAL for One Year

    1. Using historical_hydro.inp, historical_qual_ec.inp, historical_qual_vol.inp as the launch files, run HYDRO and QUAL for one year in 2002. Start QUAL a day later to avoid mass conservation errors in the first hour. Make sure the init_conc variable (in SCALAR block) is set to zero so that there will be no initial condition contribution for any variables (note: for a volumetric fingerprint, it may be useful to make this concentration 100 if you want to include initial conditions in the fingerprint analysis).
    2. Open the output file (historical.dss), and examine the results.
  4. Process the output

    1. Use VISTA or HEC-DSSVUE to open up the output file. Copy May-September concentrations source track of Clifton Court for each location. Paste the output into a new sheet in the Excel provided called excel_fingerprint.xls, which you can use as a reference. Use the \"stacked area plot\" in Excel (one of the standard Excel plot types) to plot up the fingerprint results.
"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_3_Planning_Simulation/","title":"DSM2 Bay-Delta Tutorial 3: Planning Simulation","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_3_Planning_Simulation/#purpose-the-goal-of-this-tutorial-is-to-learn-to-preprocess-and-launch-a-bay-delta-planning-simulation-using-calsim-output-as-the-basis-for-flow-inputs","title":"Purpose: The goal of this tutorial is to learn to preprocess and launch a Bay-Delta planning simulation using CalSim Output as the basis for flow inputs.","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_3_Planning_Simulation/#the-calsim-study-we-will-use-is-the-ocap_2005a01a_ewa2_71_novamp_dvdss-provided-in-the-tutorialsdata-directory-we-will-prepare-and-launch-the-run-using-both-temporary-barriers-and-permanent-barriers-configurations-sdip-south-delta-improvements-program","title":"The CalSim study we will use is the ocap_2005A01A_EWA2_71_novamp_DV.dss provided in the tutorials/data directory. We will prepare and launch the run using both temporary barriers and permanent barriers configurations (SDIP: South Delta Improvements Program).","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_3_Planning_Simulation/#preparation","title":"Preparation","text":"

We will begin by creating a study space to house the planning study.

  1. Copy the study template:

    1. In windows, navigate to \\{DSM2_home}\\study_templates. Copy the ocap_sdip template to \\{DSM2_home}\\tutorial\\ocap_sdip. Copy the ocap_temp_barrier template to \\{DSM2_home}\\tutorial\\ocap_temp_barrier
    2. In each new study folder, create a directory called \"output\" if there is not such a folder there already.
    3. Copy the file ocap_2005A01A_EWA2_71_novamp_DV.dss from \\{DSM2_home}\\timeseries to \\{DSM2_home}\\tutorial\\data\\calsim. Note that we just put this file in timeseries as a sample \u2013 in practice CalSim output will be exterior to the DSM2 distribution (or will be in the study folder).
  2. Preprocess for sdip and temp_barriers:

    1. Navigate to the ocap_sdip study directory and open config_sdip_ocap_71.inp.
    2. Make sure that the run dates are set to the full 1974-1991 sixteen year planning period. It is a good idea to preprocess the full period even if you want to run a subset of these dates.
    3. Set the DSM2MODIFIER to ocap_sdip_tutorial.
    4. Make sure that the DICU version in the configuration file is 2005, representing a 2005 level of development.
    5. Makes sure the STAGE_VERSION in the configuration file is PLANNING-2-SL.\u00a0\u00a0
    6. Make sure the configuration file is pointing to the right data, which means using the right directory, file and DSS path to find the CalSim results. In this case, set:
      1. CALSIMNAME to ocap_2005A01A_EWA2_71_novamp_DV (CalSim output file without the \".dss\" extension)
      2. CALSIMSTUDY_ORIGINAL to 2005A01A
      3. ~~ CALSIMDIR to ../data/calsim ~~
    7. Save your data
    8. Launch the preprocessing system. Obtain a command prompt and type:

> prepro config_sdip_ocap_71.inp

    1. Repeat the steps above for the temporary barriers directory and the configuration file config_ocap_temp_barriers.inp. Make sure that the dates span the full 1974-1991 period and repeat the checks (d) and (e) for the temporary barrier configuration file.
    2. Set the DSM2MODIFIER to ocap_temp_barrier_tutorial.
    3. Launch the preprocessor with the command:

> prepro config_ocap_temp_barriers.inp

  1. Run DSM2:
    1. In Windows Explorer, navigate to the directory, \\{DSM2_home}\\tutorial\\ocap_sdip
    2. Open the launch files hydro.inp and qual_ec.inp.
    3. Set the dates to a shorter period, 1974-1976, so that the run will take reasonable time for the tutorial. Note that we always preprocess the full period even when we attempt to shorten the run.
    4. Run the sdip simulation, for hydro and qual by typing:

> hydro hydro.inp,~~ocap_sdip_tutorial.dss ~~ > qual qual_ec.inp

    1. Uncomment and Repeat these steps (a-c) and run hydro and qual for the temporary barrier simulation.
  1. Examine the output:

    1. The temporary barriers and permanent barriers protect water levels in the South Delta in very different ways. Compare the output at ROLD059, Old River at Tracy Blvd for your two runs to see the differences.
"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/","title":"DSM2 Bay-Delta Tutorial 4: Batch Preprocessing","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/#purpose-this-tutorial-will-demonstrate-how-to-preprocess-a-number-of-calsim-output-files-each-of-which-represents-a-different-alternative-we-will-look-at-three-alternatives-but-the-techniques-apply-to-large-numbers-of-alternatives-just-as-well-in-the-process-of-this-tutorial-you-should-become-more-familiar-with-how-dsm2-and-calsim-label-their-simulations-and-scenarios-and-a-learn-a-little-bit-about-batch-files","title":"Purpose: This tutorial will demonstrate how to preprocess a number of CalSim output files, each of which represents a different alternative \u2013 we will look at three alternatives, but the techniques apply to large numbers of alternatives just as well. In the process of this tutorial, you should become more familiar with how DSM2 and CalSim label their simulations and scenarios and a learn a little bit about batch files","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/#calsim-files-a-typical-situation-with-planning-studies-is-that-the-input-scenarios-are-represented-by-different-calsim-output-files-sometimes-these-files-reside-in-a-directory-structure-that-follows-a-pattern-for-instance-the-first-two-alternatives-might-look-like-this","title":"CalSim Files: A typical situation with planning studies is that the input scenarios are represented by different CalSim output files. Sometimes these files reside in a directory structure that follows a pattern, for instance the first two alternatives might look like this:","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/#ccalsim","title":"C:/calsim","text":"

/altname1 /dss /d1641 2020d09edvsa.dss /altname2 /dss /d1641 2020d09edvsa.dss Note that this scheme CalSim uses directory structure to differentiate its output \u2013 the files and pathnames are identical. Another system you may encounter is one where the CalSim files themselves are named after the scenario:

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/#ccalsim_1","title":"C:/calsim","text":"

/altname1_2020d09edvsa.dss /altname2_2020d09edvsa.dss

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/#preprocessor-requirements","title":"Preprocessor requirements:","text":"

The DSM2 preprocessing scheme requires three pieces of information for each scenario:

  1. The DSM2 name we want to give the scenario (will become DSM2MODIFIER).
  2. The directory in which the CalSim output is found (will become CALSIMDIR)
  3. The name of the CalSim file (minus the .dss part \u2013 will become CALSIMNAME)

So for the first example above DSM2MODIFIER=altname1 CALSIMNAME=2020d09edvsa CALSIMDIR= c:/calsim/altname1/dss/d1641 How you will get this information into the preprocessing system depends on approach. We will look at two, but if you are an experienced script writer you will immediately see lots of possibilities.

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/#two-approaches-for-batch-jobs","title":"Two approaches for batch jobs:","text":"

For larger studies, you have some choices as to how to set things up. We'll look at a few that may help you get started, while experienced script writers are likely to come up with numerous interesting variations. These exercises will guide you in setting up modest batch processing and familiarize you a bit more with the concept of environmental variables at the command line and in windows \"batch\" scripts (files with a *.bat extension that list commands for the command line).

  1. You can create configuration files for each alternative, e.g. config_alt1.inp, config_alt2.inp. In each configuration file you hard-wire the information that is required for that scenario. This method provides a record of each scenario for people who inherit your study. It is a good choice when the number of alternatives is small. It is also a good choice when things other than CalSim vary between alternatives.
  2. Alternatively, you can create a single configuration file that points the three scenario-related variables to generic values. Then you use a batch_prepro.bat script to loop through the scenarios. When the number of simulations is very large (say 100 climate change scenarios) and the only difference in the inputs is CalSim, this method is efficient.

Now let's go through the exercises and check out the details.

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/#method-1-using-separate-configuration-files","title":"Method 1: Using separate configuration files:","text":"
  1. Create the configuration files:
    1. In windows, navigate to \\{DSM2_home}\\tutorial\\ocap_sdip. The alternatives we are using have generic sounding names, but they are compatible with OCAP assumptions.
    2. Copy the configuration file config_sdip_ocap_71.inp to config_alt1.inp
    3. Make sure the study dates cover the full 1974-1991 period for planning runs. It is usually a good idea to preprocess the whole period, even if you are going to run dsm2 on a subset of the simulation period.
    4. Replace the three variables indicated below. The three lines may not be next to one another.

\\<file config_alt1.inp> ENVVAR NAME VALUE [other definitions\u2026] # NOTE: LINES SHOWN MAY NOT

    1. BE TOGETHER* CALSIMNAME 2005a01edv # File name, minus .dss DSM2MODIFIER alt1 # DSM2 name for alternative CALSIMDIR ../data/calsim/alt1 # CalSim output directory END
    1. Copy the file config_alt1.inp to config_alt2.inp. Repeat step (d) using alt2 as the DSM2MODIFIER.
    2. Prepare hydro.inp and qual.inp to handle a generic configuration file by making the name of the configuration file at the top of each an ENVVAR. We will be providing this from the command line or batch file \u2013 as an operating system environmental variable.

\\<file hydro.inp> CONFIGURATION ${CONFIGFILE} # Changed END \u2026 [other data]

    1. Prepare a batch file for preprocessing. It will have one line per alternative. Notice the \"call\" statement \u2013 this is the best way to call a succession of other batch files (prepro is itself a batch file called prepro.bat).

\\<file study_prepro.bat> call prepro config_alt1.inp call prepro config_alt2.inp

    1. At the command prompt, launch the preprocessing by typing:

> study_prepro.bat

    1. Now create a batch file that launches QUAL and HYDRO for every alternative in the study. For each alternative, you must set the environment variable CONFIGFILE, then launch the models.

\\<file study.bat> SET CONFIGFILE=config_alt1.inp hydro hydro.inp qual qual_ec.inp SET CONFIGFILE=config_alt2.inp hydro hydro.inp qual qual_ec.inp

    1. Launch the study batch file by typing at the command prompt:

> study.bat

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/#method-2-batch-file-that-loops","title":"Method 2: Batch file that loops","text":"
  1. *Create a generic configuration file:*
    1. In the looping method, we are going to describe the alternatives in a text file and loop through the text file. First we need a configuration file that is generic. Let's begin by copying config_sdip_ocap_71.inp one more time to a file called config_study.inp. Change the 3 variables (DSM2MODIFIER, CALSIMNAME and CALSIMDIR) as follows.

\\<file config_study.inp> ENVVAR NAME VALUE [other definitions\u2026] CALSIMNAME ${BATCH_CALSIMNAME} # File name, minus . DSM2MODIFIER ${BATCH_DSM2MODIFIER} CALSIMDIR ${BATCH_CALSIMDIR} # CalSim output directory dss

  1. DSM2 name for alternative [other definitions\u2026] END

  2. Create the scenarios.txt file

    1. In the study folder, create a file called scenarios.txt
    2. On each line of the file, put the scenario name (DSM2MODIFIER), directory (CALSIMDIR) and file name (CALSIMNAME) minus the \".dss\" extension.

\\<file scenarios.txt> alt1,../data/calsim/alt1,2005a01edv alt2,../data/calsim/alt2,2005a01edv

  1. Launch batch_prepro.bat
    1. In the study directory, obtain a command prompt and type:

> batch_prepro config_study.inp scenarios.txt

    1. Note: if the batch_prepro script fails for a particular scenario after running others successfully, first fix the problem and eliminate the failed (half-processed) scenario. Then avoid re-running the successful scenarios by adding the \"resume\" tag, for example:

> batch_prepro config_study.inp scenarios.txt resume If you type this command now, batch_prepro.bat will harmlessly do nothing.

  1. Examine and use the preprocessing products
    1. The preprocessing product is a HEC-DSS file for each scenario in the local time series directory. You should have one file per scenario.
    2. If you are doing this tutorial on your own, you may choose to launch dsm2 on each alternative. To do this, change the configuration file in hydro.inp and qual_ec.inp to the generic one:

\\<file hydro.inp> CONFIGURATION config_study.inp END

    1. Use batch_run.bat with the same syntax as you did for batch_prepro::

> batch_run config_study.inp Note that you may need to modify this script if you use it for something other than qual_ec. We may not be able to run the simulations in class because of the time required \u2013 but if you have extra time, change the dates to a one year (1991) and try it out.

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/","title":"DSM2 Bay-Delta Tutorial 5: Suisun Marsh Operating Rules","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/#purpose-the-objective-of-this-tutorial-is-to-learn-about-the-suisun-marsh-salinity-control-gate-and-practice-tuning-an-operating-rule","title":"Purpose: The objective of this tutorial is to learn about the Suisun Marsh Salinity Control Gate and practice tuning an operating rule.","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/#background","title":"Background:","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/#the-suisun-marsh-salinity-control-gates-smscg-were-completed-and-began-operating-in-october-1988-the-first-year-of-operation-was-used-to-test-the-gates-and-official-operation-began-in-november-1989-the-facility-consists-of-a-boat-lock-a-series-of-three-radial-gates-and-flashboards-the-smscg-control-salinity-by-restricting-the-flow-of-higher-salinity-water-from-grizzly-bay-into-montezuma-slough-during-incoming-tides-and-retaining-lower-salinity-sacramento-river-water-from-the-previous-ebb-tide-operation-of-the-smscg-in-this-fashion-lowers-salinity-in-suisun-marsh-channels-and-results-in-a-net-movement-of-water-from-east-to-west-when-delta-outflow-is-low-to-moderate-and-the-smscg-are-not-operating-net-movement-of-water-is-from-west-to-east-resulting-in-higher-salinity-water-in-montezuma-slough","title":"The Suisun Marsh Salinity Control Gates (SMSCG) were completed and began operating in October 1988. The first year of operation was used to test the gates, and official operation began in November 1989. The facility consists of a boat lock, a series of three radial gates, and flashboards. 
The SMSCG control salinity by restricting the flow of higher salinity water from Grizzly Bay into Montezuma Slough during incoming tides and retaining lower salinity Sacramento River water from the previous ebb tide. Operation of the SMSCG in this fashion lowers salinity in Suisun Marsh channels and results in a net movement of water from east to west. When Delta outflow is low to moderate and the SMSCG are not operating, net movement of water is from west to east, resulting in higher salinity water in Montezuma Slough.","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/#the-smscg-usually-begin-operating-in-early-october-and-depending-on-salinity-conditions-may-continue-operating-through-the-end-of-the-control-season-in-may-when-the-channel-water-salinity-decreases-sufficiently-below-the-salinity-standards-or-at-the-end-of-the-control-season-the-flashboards-are-removed-and-the-smscg-raised-to-allow-unrestricted-movement-through-montezuma-slough-details-of-annual-smscg-operations-can-be-found-in-summary-of-salinity-conditions-in-suisun-marsh-during-water-years-19841992-dwr-1994b-or-the-suisun-marsh-monitoring-program-data-summary-produced-annually-by-dwrs-environmental-services-office","title":"The SMSCG usually begin operating in early October and, depending on salinity conditions, may continue operating through the end of the control season in May. When the channel water salinity decreases sufficiently below the salinity standards, or at the end of the control season, the flashboards are removed and the SMSCG raised to allow unrestricted movement through Montezuma Slough. 
Details of annual SMSCG operations can be found in Summary of Salinity Conditions in Suisun Marsh During Water Years 1984\u20131992 (DWR 1994b), or the Suisun Marsh Monitoring Program Data Summary produced annually by DWR's Environmental Services Office.","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/#the-tidal-operation-of-the-gate-should-open-the-gate-when-a-water-level-drop-of-03-ft-exists-across-the-gate-upstream-to-downstream-and-to-close-the-gate-when-velocity-is-less-than-01-impending-flood-tide-the-boat-lock-is-held-open-whenever-the-radial-gates-are-operated-tidally-the-flashboard-is-typically-in-place-when-the-gates-are-operated-and-removed-when-the-gate-is-fully-open-note-that-in-the-historical-record-these-relationships-do-not-always-hold-there-have-been-numerous-operating-experiments","title":"The tidal operation of the gate should open the gate when a water level drop of 0.3 ft exists across the gate (upstream to downstream) and to close the gate when velocity is less than 0.1 (impending flood tide). The boat lock is held open whenever the radial gates are operated tidally. The flashboard is typically in place when the gates are operated and removed when the gate is fully open. 
Note that in the historical record these relationships do not always hold \u2013 there have been numerous operating experiments.","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/#we-will-use-martinez-ec-as-a-surrogate-to-determine-when-ec-compliance-is-an-issue-and-the-gates-need-to-be-operated-tidally","title":"We will use Martinez EC as a surrogate to determine when EC compliance is an issue and the gates need to be operated tidally.","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/#a-simplified-version-of-the-marsh-standards-is-given-below-the-units-are-millisiemens-per-square-cm-which-are-a-thousand-times-the-microsiemens-used-in-dsm2-modeling-there-is-a-water-year-dependence-in-the-full-set-of-standards-the-levels-given-in-the-table-apply-to-1974-1976-but-not-to-1977-which-is-a-deficiency-year-in-this-tutorial-we-will-consider-only-the-site-s-42-suisun-slough-volanti-slough-the-rki-for-this-location-is-slsus012-and-the-location-is-channel-494-distance-4681ft","title":"A simplified version of the Marsh standards is given below. The units are millisiemens per square cm, which are a thousand times the microsiemens used in DSM2 modeling. There is a water year dependence in the full set of standards \u2013 the levels given in the table apply to 1974-1976 but not to 1977 which is a \"deficiency year\". In this tutorial, we will consider only the site S-42, Suisun Slough @ Volanti Slough. The RKI for this location is SLSUS012, and the location is channel 494 distance 4681ft.","text":"

D-1641 STANDARD

OCT

NOV

DEC

JAN

FEB-MAR

APR-MAY

Eastern

C-2

19.0

15.5

15.5

12.5

8.0

11.0

S-64

19.0

15.5

15.5

12.5

8.0

11.0

S-49

19.0

15.5

15.5

12.5

8.0

11.0

Western

S-42

19.0

16.5

15.5

12.5

8.0

11.0

S-21

19.0

16.5

15.5

12.5

8.0

11.0

S-35

N/A*

N/A*

N/A*

N/A*

N/A*

N/A*

S-97

N/A*

N/A*

N/A*

N/A*

N/A*

N/A*

*In a good faith effort, DWR will consider S35 and S97 monitoring stations

*In a good faith effort, DWR will consider S35 and S97 monitoring stations

when deciding gate operations.

when deciding gate operations.

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/#steps","title":"Steps:","text":"
  1. Copy the study and configuration files:
    1. In windows, navigate to \\{DSM2_home}\\study_templates\\ocap_sdip.
    2. Copy the planning study to the \\{DSM2_home}\\tutorials directory.
    3. Rename config_sdip_ocap_71.inp as config_suisun.inp.
    4. In the configuration file, make sure the study dates cover the full 1974-1991 period for planning runs. It is usually a good idea to preprocess the full period of the inputs, even if you are going to run dsm2 on a subset of the simulation period.
    5. Set DSM2MODIFIER to suisun.
    6. Run prepro on the file config_suisun.inp:

> prepro config_suisun.inp

  1. Examine and correct the Suisun Marsh operating rule.

The Montezuma Slough velocity close rule in oprule_montezuma_planning_gate.inp is based on flow (note this file name will have a version date appended to it). The rule requires correction to be based on velocity.

    1. Add a file representing a \"correction layer\" to the operating rules called oprule_revised_montezuma.inp.
    2. Correct the velclose part of the rule to be based on channel velocity. You can look up the correct variable name in the Operating Rule Guide in the html help system.
    3. Note the Martinez EC path used in the operating rule to determine whether the gate needs to be operated tidally. Open the suisun.dss input file and tidally or daily average this path. Then substitute the tidally averaged version of EC in the operating rule by overriding the time series definition in the Operation Time Series table.
    4. Note that the threshold for operating the gate is in the configuration file: MSCS_EC_THRESH 20000
  1. Run DSM2:

    1. In the configuration file, set the run dates to 1974 \u2013 1977.
    2. Add the output you will need to examine the S42 site using the information given in the introduction and the techniques you have learned from the other tutorials.
    3. Point the CONFIGURATION include file in hydro.inp to config_suisun.inp.

d. Launch HYDRO with the command: >hydro hydro.inp

  1. Examine the output.
    1. Compare EC output to the standard presented in the introduction. Is the gate over-operating or under-operating?
    2. How can you further enhance the operating rule? Discuss the boatlock and flashboards.
"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_7_Clifton_Court_Diurnal_Pumping/","title":"DSM2 Bay-Delta Tutorial 7: Clifton Court Diurnal Pumping","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_7_Clifton_Court_Diurnal_Pumping/#purpose-the-goal-of-this-tutorial-is-to-learn-how-to-implement-a-diurnal-pumping-quota-for-banks-pumping-state-water-project-in-the-process-you-will-learn-how-to-track-totals-using-the-accumulate-function","title":"Purpose: The goal of this tutorial is to learn how to implement a diurnal pumping quota for Banks pumping (State Water Project). In the process you will learn how to track totals using the ACCUMULATE function.","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_7_Clifton_Court_Diurnal_Pumping/#background-the-banks-pumping-facility-is-often-operated-on-a-diurnal-schedule-emphasizing-pumping-during-off-peak-electricity-hours-an-example-of-summer-electricity-prices-for-the-year-2005-is-shown-in-figure-1","title":"Background: The Banks pumping facility is often operated on a diurnal schedule, emphasizing pumping during off-peak electricity hours. An example of summer electricity prices for the year 2005 is shown in Figure 1:","text":"

Figure 1: Example wholesale electricity prices in July 2005 (CWEMF, KT Shum)

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_7_Clifton_Court_Diurnal_Pumping/#an-idealized-schedule-from-the-point-of-view-of-electricity-would-be-to-pump-the-maximum-possible-amount-late-at-night-until-the-daily-pumping-needs-are-satisfied-actual-hourly-variations-in-pumping-are-shown-in-figure-2-numerous-other-factors-eg-ensuring-minimum-stage-requirements-in-the-forebay-can-affect-instantaneous-maximum-pumping-which-is-why-we-might-consider-an-operating-rule-instead-of-a-simple-time-series-to-model-diurnal-pumping","title":"An idealized schedule from the point of view of electricity would be to pump the maximum possible amount late at night until the daily pumping needs are satisfied. Actual hourly variations in pumping are shown in Figure 2. Numerous other factors (e.g. ensuring minimum stage requirements in the Forebay) can affect instantaneous maximum pumping, which is why we might consider an operating rule instead of a simple time series to model diurnal pumping.","text":"

In this tutorial we will emulate the ideal pumping schedule by tracking the amount pumped since midnight and quitting once we have pumped a total that satisfies the daily average requested by CALSIM. We will use the ACCUMULATE function to track the total. In a later step we will attenuate pumping to avoid drawing Clifton Court Forebay below -2ft NGVD. Figure 2: Diurnal Variation in Pumping, July-August 2004 (CWEMF, KT Shum)

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_7_Clifton_Court_Diurnal_Pumping/#the-planning-study-we-will-use-for-this-tutorial-is-ocap_sdip-provided-in-the-study_templates-directory-the-choice-between-temporary-and-permanent-barriers-is-not-central-to-the-material-though-the-sdip-project-did-propose-higher-pumping","title":"The planning study we will use for this tutorial is ocap_sdip *provided in the study_templates directory. The choice between temporary and permanent barriers is not central to the material, though the SDIP project did propose higher pumping.*","text":""},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_7_Clifton_Court_Diurnal_Pumping/#preparation","title":"Preparation","text":"

We will begin by creating a study space to house the planning study.

  1. Copy the study template:

    1. In windows, navigate to \\{DSM2_home}\\study_templates. Copy and rename the ocap_sdip template to \\{DSM2_home}\\tutorial\\ocap_sdip_diurnal_swp.
    2. If you have not already done so for a previous tutorial, copy the file ocap_2005A01A_EWA2_71_novamp_DV.dss (CALSIM output file used for planning runs) from \\{DSM2_home}\\timeseries to \\{DSM2_home}\\tutorial\\data\\calsim. Note that we just put this file in timeseries as a sample \u2013 in practice CalSim output will be exterior to the DSM2 distribution (or should go in the study folder).
  2. Preprocess for sdip barriers:

    1. Rename config_sdip_ocap.inp to config_sdip_ocap_diurnal_ccfb.inp and open the file.
    2. Make sure that the run dates are set to the full 1974-1991 (01OCT1974 0000 \u2013 01OCT1991 0000) sixteen year planning period. It is a good idea to preprocess the full period even if you want to run a subset of these dates.
    3. Set the DSM2MODIFIER to diurnal_pumping.
    4. Make sure that the DICU version in the configuration file is 2005, representing a future (2005) level of development.
    5. Make sure the STAGE_VERSION in the configuration file is PLANNING-2-SL.
    6. Make sure the configuration file is pointing to the right directory, file and DSS path to find the CalSim results. In this case, set:
      1. CALSIMNAME to ocap_2005A01A_EWA2_71_novamp_DV (CalSim output file without the \".dss\" extension)
      2. CALSIMSTUDY_ORIGINAL to 2005A01A
      3. CALSIMDIR to ../data/calsim
    7. Save your data
    8. Launch the preprocessing system. Obtain a command prompt and type:

> prepro config_sdip_ocap_diurnal_ccfb.inp

  1. Add output for Clifton Court Forebay:

    1. In hydro.inp, add output that will allow you to more directly track the operations. Create an OUTPUT_RESERVOIR table. Create a 15min instantaneous output request with clfct or clifton_court as the name, clifton_court as the reservoir, none as the connecting node and flow-source as the variable. The flow-source output will give the total source and sink inflow to Clifton Court \u2013 it will differ from SWP pumping only by a small amount (due to Byron-Bethany Irrigation District).
  2. Run DSM2:

    1. In the configuration file, set the dates 01JAN1975 to 25JAN1975 so that the run will take a short time. These dates will generate the features we want for the tutorial, including a period of low stage at Clifton Court Forebay under diurnal operation. Note that we always preprocess the full period even when we shorten the run.
    2. Open hydro.inp file and change the included configuration file to config_sdip_ocap_diurnal_ccfb.inp and save it.
    3. Run the sdip simulation for HYDRO by typing:

> hydro hydro.inp

  1. Examine the output:

Once you have run HYDRO, open the file and look at the flow-source output for Clifton Court. This variable represents exports out of Clifton Court Forebay, which are dominated by State Water Project pumping.

"},{"location":"tutorials/DSM2_Bay-Delta_Tutorial_7_Clifton_Court_Diurnal_Pumping/#diurnal-operating-rule","title":"Diurnal Operating Rule","text":"

1. Create the diurnal rule with no Forebay stage protection:

    1. Create a file called oprule_diurnal_swp.inp. Create empty OPERATING_RULE and OPRULE_EXPRESSION tables. Alternatively, do this by copying, renaming and clearing the contents of another operating rule input file.
    2. Create an expression to accumulate daily State Water Project (SWP) pumping since midnight:
      1. Name: daily_total_swp
      2. Definition: \"ACCUMULATE(ext_flow(name=swp)*DT,0.0,HOUR==0)\"

This reads \"accumulate swp, starting at zero, resetting when the hour of the day is zero\". We multiply by DT to get a volume (which makes the rule time step independent and allows comparison to a daily target). The time series reference comes from elsewhere in the input and is the daily average pumping rate. It is perfectly acceptable to use time series that are defined elsewhere in the DSM2 input without redefining it in the OPRULE_TIME_SERIES table \u2013 the latter is just there to allow you to define any additional time series you might need.

    1. Create an expression to quantify the daily target. Note that we are multiplying an average daily flow in cubic feet per second by the number of seconds in the day to obtain a volume. 1. Name: daily_target_swp 2. Definition: ts(name=swp)*(60*60*24)
    2. Create an expression that defines maximum physical SWP pumping as a magnitude:
      1. Name: max_swp_pumping
      2. Definition: 9000.0
    3. Now, in the OPERATING_RULE table create a rule that pumps the maximum until the daily total is reached:
      1. Name: swp_diurnal
      2. Action: \"SET ext_flow(name=swp) TO IFELSE(abs(daily_total_swp) > abs(daily_target_swp), 0.0, -max_swp_pumping)\". Note the quotes and the minus sign: SWP is really a sink, not a source.
      3. Trigger: Use STARTUP or TRUE for the trigger (the two do the same thing, and trigger exactly once at the beginning of the run). The rule will be in use unless it is displaced by another operating rule.
    4. In hydro.inp, add the new oprule_diurnal_swp.inp file at the bottom of the OPERATIONS include block.
    5. Run HYDRO on the simulation. Examine the output for HYDRO, including Clifton Court reservoir water levels, flow through the gates to node 72 and the \"flow-source\" output for the reservoir (which will differ from SWP pumping by a small amount due to Byron-Bethany Irrigation District). Are you getting the fully-on-fully-off pumping pattern you expect? Could the same schedule be prepared off-line in advance using a 15-min time series for SWP pumping? Does Clifton Court water surface go below the \"warning\" level of -2.0ft NGVD needed to maintain flow in the fish facilities?
    6. Now create an expression identifying a low stage condition:
      1. Name: ccfb_stage_low
      2. Definition: res_stage(res=clifton_court) \\< -2.0
    7. Change the trigger for swp_diurnal to \"NOT ccfb_stage_low\", including the quotes.
    8. Create an expression that describes inflow into Clifton Court from the outside channel:
      1. Name: ccfb_inflow
      2. Definition: res_flow(res=clifton_court,node=72)
    9. Create a new operating rule that covers the critical case:
      1. Name: swp_low_stage
      2. Action:

\"SET ext_flow(name=swp) TO -min2(abs(ccfb_inflow),max_swp_pumping)\" This rule sets exports equal to the inflow to Clifton Court, which allows some pumping to continue as long as it does not further draw down Clifton Court. A simple alternative would be just to set exports to zero.

      1. Trigger: ccfb_stage_low

Note the minus sign, again because SWP exports are a sink rather than a source. The absolute sign is there to make sure the minimum function is not operating on any big transient negative flows.

    1. Rerun HYDRO. Are you getting the results you expected? Does Clifton Court stage go below -2.0? Are you still pumping according to the expected pattern? Could you implement this policy with a time series controlling SWP instead of an operating rule?
"},{"location":"tutorials/DSM2_Tutorial_Overview/","title":"DSM2 Tutorial Overview","text":""},{"location":"tutorials/DSM2_Tutorial_Overview/#dsm2-overview","title":"DSM2 Overview","text":"

DSM2 Modules

  • HYDRO

  • QUAL

  • *PTM*The Delta Simulation Model II (DSM2) is a one-dimensional mathematical model for dynamic simulation of one-dimensional hydrodynamics, water quality and particle tracking in a network of riverine or estuarine channels. DSM2 can calculate stages, flows, velocities, mass transport processes for conservative and non-conservative constituents including salts, water temperature, dissolved oxygen, and trihalomethane formation potential, and transport of individual particles. DSM2 thus provides a powerful simulation package for analysis of complex hydrodynamic, water quality, and ecological conditions in riverine and estuarine systems. DSM2 currently consists of three modules: HYDRO, QUAL, and PTM. The relationship between HYDRO, QUAL and PTM is shown in Figure 1. HYDRO simulates one-dimensional hydrodynamics including flows, velocities, depth, and water surface elevations. HYDRO provides the flow input for QUAL and PTM. QUAL simulates one-dimensional fate and transport of conservative and non-conservative water quality constituents given a flow field simulated by HYDRO. PTM simulates pseudo 3-D transport of neutrally buoyant particles based on the flow field simulated by HYDRO. PTM has multiple applications ranging from visualization of flow patterns to simulation of discrete organisms such as fish eggs and larvae. A fourth module for sediment transport is currently being developed.

    HYDRO 1-D flow, velocity, depth, and water surface elevations*QUAL* 1-D fate and transport of conservative and non-conservative constituents*PTM* Pseudo 3-D transport of neutrally buoyant particles Figure 1: Schematic of DSM2 Modules

  • Historical

  • Forecasting

  • *Planning*DSM2 is usually used for three kinds of Delta simulations: historic conditions, forecasting future conditions (real-time), and planning studies (Figure 2 and Table 1). Each type of DSM2 study is briefly described below *Recreate Historic Conditions*Historical simulations replicate past operations, hydrologic conditions, water quality and Delta configurations. These historical simulations enable calibration and validation of the model by comparison of simulation results and field data. Historical simulations also augment available field data to provide a more spatially and temporally complete representation of the hydrodynamic and water quality conditions for that time period. Forecasting simulations, also known as real-time simulations, use recent field data and forecast data to project Delta conditions into the near future (typically one to ten weeks). Recently collected historical data provide current conditions for the Delta. Recent tidal elevations at Martinez are used with an astronomical tide forecast to project the Martinez tide into the near future. Corresponding hydrodynamic and water quality conditions in the Delta are then simulated. Forecasting simulations can assist State Water Project operations decisions. *Planning Studies of Hypothetical Conditions*Delta planning studies evaluate how hypothetical changes to factors such as hydrologic regimes, water quality standards, system operations, and Delta configurations may impact Delta conditions. To explore the impacts of a given scenario under various hydrologic conditions, DSM2 planning studies are typically run under a 16-year sequence of Delta inflows and exports derived from statewide water transfer and storage simulations using CalSim-II More information on CalSim-II can be found on the web at https://water.ca.gov/Library/Modeling-and-Analysis/Central-Valley-models-and-tools/CalSim-II . 
Planning simulations can use historical or astronomical tidal data which incorporate influences of the spring-neap tidal cycle or simulations can use an average repeating tide (typically the 19-year mean tide). Planning simulations typically assess impacts of proposed changes to Delta operations or configuration such as modified reservoir releases or dredging of channels. Planning study may also investigate impacts of hypothesized changes in the natural environment such as sea level rise. Historical Replicate historical conditions*Forecasting* Project conditions for the near future*Planning* Hypothetical Delta changes*DSM2* Modes of Operation

    Figure 2: DSM2 Modes of Operation Table 1: Parameter Descriptions for Three Modes of DSM2 Application

    Simulation Parameter

    Replicate Historic Conditions

    Forecasting Future Conditions

    Planning Studies for Hypothetical Conditions

    Boundary Tide

    Historic or astronomical tide

    Historic and projected astronomical forecast tide

    Historic, astronomical

    Input Data

    Historic inflows and exports Average Delta consumptive use

    Recent and current inflows and exports Average Delta consumptive use

    CalSim-II statewide operations studies provide inflows and exports Average Delta consumptive use

    Simulation Period

    1990-2001 are currently possible

    1-10 weeks into the future

    1976-1991 sequence from CalSim-II statewide operations studies

"},{"location":"tutorials/DSM2_Tutorial_Overview/#forecast","title":"Forecast","text":"

*Future Conditions*DSM2 Study Types DSM2 Study Types

"},{"location":"tutorials/Delta_Tutorial_8_-_Temperature_Simulation/","title":"Delta Tutorial 8 - Temperature Simulation","text":"

DSM2 can be used to simulate water temperature and transport of this property. Water temperature is also influenced by suspended particles and biological matter in the water, and temperature simulation is provided as a module in DSM2.\u00a0

  • Hari Could you help outline the steps for a tutorial in temperature simulation ?
"},{"location":"tutorials/Delta_Tutorial_8_-_Temperature_Simulation/#step-by-step-guide","title":"Step-by-step guide","text":""},{"location":"tutorials/Delta_Tutorial_8_-_Temperature_Simulation/#related-articles","title":"Related articles","text":"
  • Page:

    Data Requirement

  • Page:

    Delta Tutorial 8 - Temperature Simulation

"},{"location":"tutorials/Delta_Tutorial_9_-_DO_Simulation/","title":"Delta Tutorial 9 - DO Simulation","text":"

DSM2 can be used to simulate dissolved oxygen levels in the water. This tutorial shows how to setup the input, run and retrieve the output from the model simulation.\u00a0

  • HariCould you help me outline the steps involved in doing a DO simulation ?
"},{"location":"tutorials/Delta_Tutorial_9_-_DO_Simulation/#step-by-step-guide","title":"Step-by-step guide","text":""},{"location":"tutorials/Delta_Tutorial_9_-_DO_Simulation/#related-articles","title":"Related articles","text":"
  • Page:

    Delta Tutorial 9 - DO Simulation

"},{"location":"tutorials/Tutorial_1_Channels/","title":"Tutorial 1: Channels","text":"

Task Run DSM2 for a steady boundary condition flow and salinity (EC-electrical conductivity) simulation for a simple straight channel grid\u00a0 Skills Gained

  • Get started with DSM2

  • Creating channels

  • Establishing initial and boundary conditions

    The purpose of this tutorial is twofold: to get a start with the DSM2 model and to get practice setting up channels. We will set up a simple channel-only grid with simple constant boundary conditions and run both HYDRO and QUAL. We will look at two formats for entering cross-section geometry (the new DSM2 single file format and CSDP [Cross Section Development Program] format) and we will familiarize ourselves with the\u00a0echo\u00a0output file that gives you a single-file complete record of all the input data used in a DSM2 module.\u00a0 For the tutorial, the channels have the following configuration and specifications: />

    Figure 1 - Simple channel configuration and specifications. Note that there are two cross-section geometries labeled A and B which will be specified later in this tutorial. In all the channels except Channel 5 the cross sections have been assigned at the midpoint of the channel. In Channel 5 the cross-sections are assigned at fractions 0.2 and 0.8 of the length of the channel measured from the upstream end. The DSM2 grid map includes arrows pointing from upstream to downstream, indicating the positive direction of flow.

    Overview of DSM2 Channel Cross Sections DSM2 assumes a piecewise linear cross-sectional bathymetry. Width, area and wetted perimeter are tabulated according to elevation. Each elevation lists the data (width) or cumulative data (wetted perimeter and area) below the given elevation. Anything above the top elevation is extrapolated using a slope given by a global scalar called levee_slope. Figure 2: Piecewise linear bathymetry For instance, for a cross section half way downstream in a fictitious channel 123 the five layers of a cross-section with elevations given by Figure 2, might be tabulated:

    XSECT_LAYER\nCHAN_NO DIST ELEV AREA WIDTH WET_PERIM\n123 0.5 -14.6 0.0 0.0 0.0\n123 0.5 -9.2 216.0 80.0 102.5\n123 0.5 -4.0 736.0 120.0 111.0\n123 0.5 9.5 2410.0 160.0 142.3\n123 0.5 12.0 3028.5 162.0 148.0

    The above table is in the single-file DSM2 cross-section format. An analogous table is produced by the Cross Section Development Program (CSDP). We will practice using both in the tutorial. The parameter levee_slope is seldom changed from its standard value of 0.33.

    The following steps will instruct you on how to create the channels, give them very simple boundary conditions and run the model.

  • Open the hydro input file and add parameters:

    1. For this tutorial, you will want to use Notepad++ (recommended\u00a0https://notepad-plus-plus.org/), Textpad or Emacs \u2013 some text editor that will not add special markup to your input.
    2. Navigate to the\u00a0${DSM2_home}\\tutorial\\simple\\t1_channels\u00a0directory and this directory will be referred to as the\u00a0tutorial directory\u00a0below.
    3. Open the\u00a0hydro.inp\u00a0file using one of the text editors recommended in 1a.
  • In HYDRO, add the Scalar Runtime information:

    1. DSM2 input files use a keyword based table structure. Each table begins with a keyword on the first line and column headings (called\u00a0field headers) on the second line. There are as many lines of data as needed in the middle of the table, and the table closes with an \"END\" line and a carriage return.

    2. In the\u00a0hydro.inp\u00a0file, locate the SCALAR table. Scalars are name-value pairs that control the model or define constants and runtime parameters. Some scalar parameters are already defined in the sample file.

    3. Add the following run date, run time and temporary directory scalars at the top of the\u00a0SCALAR\u00a0table and save.\u00a0

      Spaces or tabs can be used between values

      SCALAR\nNAME VALUE\nrun_start_date 01JAN1992 #scalars to be added\nrun_end_date 01MAR1992   #scalars to be added\nrun_start_time 0000      #scalars to be added\nrun_end_time 0000        #scalars to be added\ntemp_dir c:/temp\ntitle \"TUTORIAL SIMULATION ${DSM2MODIFIER}\" # [other scalars already included in the file]\nwarn_unchecked false\nEND\n

      Note that temp_dir should be set to a location with ample disk space for production runs. This is a scratch directory where DSM2 stores cached results.

  • *In HYDRO, add Channel information: *

    Next we will add a table of channels, including connectivity, and conveyance/dispersion parameters. We are also going to add the cross-section geometry using the XSECT_LAYER section, which is introduced in Version 8. (CSDP-styled input is discussed later).

    1. The\u00a0CHANNEL\u00a0table requires: a channel number, length, Manning's n, dispersion coefficient, node number to identify the upstream end and node number at the downstream end. Type the table and field headers for the CHANNEL table at the bottom of the\u00a0hydro.inp\u00a0file:

      CHANNEL\nCHAN_NO LENGTH MANNING DISPERSION UPNODE DOWNNODE\n
    2. Open\u00a0the file\u00a0channel_tutorial_starter.txt\u00a0and copy the data for channels 1-6 and channel 2004 from the CHANNEL table of the tutorial data file and paste it into the newly created CHANNEL table in your hydro.inp file.

    3. Type\u00a0END after the last row to end the table.

    4. Now\u00a0create the XSECT_LAYER, table which will contain one row for every vertical layer in every user-defined cross-section. This table is new in Version 8, and is intended to allow input to be represented in a single file and using a single input style \u2013 making archives and comparisons simpler. Below the CHANNEL table, create the skeleton for the XSECT_LAYER table:

      XSECT_LAYER\nCHAN_NO DIST ELEV AREA WIDTH WET_PERIM\n[data will go here]\nEND\n

      Typically in DSM2 input files, the order of the tables is not important. However, when one table refers to information defined in another table, the \"parent\" table with the definition typically appears first in the input file. In this case the CHANNEL table must be before the XSECT_LAYER table.

    5. In the first row, we will start defining a cross-section for channel #1. We will be entering three rows for Channel 1, each of which will have a \"1\" in the CHAN_NO column. The data will be located midway downstream along the channel, so in the\u00a0Distance (fraction) field, type\u00a00.5. The three rows of data are given below

      XSECT_LAYER\nCHAN_NO DIST ELEV AREA WIDTH WET_PERIM\n1 0.5 -24.0 0.0 40.0 40.0\n1 0.5 0.0 960.0 80.0 102.5\n1 0.5 20.0 2640.0 160.0 192.0\n
    6. Copy and paste the three rows of data for Channel 1 three times for Channels 2, 3 and 2004 and change the channel number. Note that changing the channel number to 2004 will shift the data so that it no longer lines up with rows above it. DSM2 reads the values in order and doesn't care about the spacing, but you can adjust the spacing for aesthetic reasons if you want and later we will encounter dsm2_tidy a utility for tidying up the tables automatically. Copy the three data lines one more time for Channel 5, this time changing the Channel number to 5 and the distance to 0.2.

    7. There is an additional cross-section given for Channel 5, cross-section \"B\". The cross section is located in Channel 5, 0.8 of the way from the upstream end to the downstream end as indicated on the schematic at the beginning of the tutorial. Enter the cross section as shown below.

      XSECT_LAYER\nCHAN_NO DIST ELEV AREA WIDTH WET_PERIM\n5 0.8 -20.0 0.0 60.0 60.0\n5 0.8 -4. 1120.0 80.0 97.74\n5 0.8 2.0 1660.0 100.0 121.06\n5 0.8 10.0 2700.0 160.0 183.16\n
    8. Copy the cross section data from Channel 5 Distance 0.8 to use it for Channel 6, but change the Distance to 0.5.

      \u00a0Make sure the table is terminated with an END line with a carriage return and save your file.\u00a0

  • In HYDRO, set the Boundary information:

    In this section we are going to assign very simple boundary conditions to the upper and lower ends of the channel system.\u00a0 Note that if you do not set boundary conditions at the end of a channel, a \"no-flow\" boundary (Q=0.0) is assumed.

    1. The upstream boundary will be a constant inflow.

    2. In\u00a0hydro.inp, enter an input table for the inflow:

      BOUNDARY_FLOW\nNAME NODE SIGN FILLIN FILE PATH\nupstream_flow 1 1 last constant 200.\nEND\n

      This line assigns a constant inflow of 200.0 cfs to the upstream boundary. The NAME column will be used 1) to associate quality inputs with inflows and 2) for prioritizing data in multiple input files. The NODE field assigns the input to Node #1. The FILLIN field is an instruction to the model as to how to interpolate data in time, which is not relevant for a constant value. DSM2 assumes consistent units and typically simulates flows in cfs.

    3. Start an input table for the downstream stage boundary: The headers FILE and PATH are more intuitive for time varying boundary conditions where a file name and a file location (path) are specified for a file that contains the time varying information. For a constant boundary condition FILE is set to \"constant\" and PATH is set to the boundary condition value

    4. The downstream boundary will be a constant water surface (stage) boundary.

      BOUNDARY_STAGE\nNAME NODE FILLIN FILE PATH\n[data go here]\nEND\n
    5. In the BOUNDARY_STAGE table, enter the following values into the appropriate fields and save:

    \u00a0Although spaces or tabs can be used, columns with spaces tend to look better when opened in a different viewer. You can use the dsm2_tidy utility to clean up columns and spaces. Type dsm2_tidy --help at a command prompt for more info.

        1. Input Name:\u00a0downstream_stage

        2. Node:\u00a07

        3. Fillin:\u00a0Last

        4. Input File:\u00a0constant

        5. Path/Value:\u00a00.0

        6. END the table and save the file.

  • *In HYDRO, set the Initial Conditions for stage and flow: *

    A default hydrodynamic initial condition is required for every channel in DSM2. The initial condition can be replaced using a restart file, but the default must still be entered now. For each of the channels, the stage and flow will be set to\u00a00. These\u00a00-values will be applied at both the\u00a00\u00a0and\u00a0length\u00a0(distance to downstream end of channel) distances along the channel. With six channels, and two locations to set the values, there will be a total of 12 rows.

      1. In the hydro.inp file, start the initial condition table:

      CHANNEL_IC CHAN_NO DISTANCE STAGE FLOW 1 0 0.0 0.0 1 length 0.0 0.0 [further data will go here] END

    Copy the two lines of data and paste them into the input file for all of the channels. Refer back to Figure 1 for the channel numbers.

  • *In HYDRO, Specify the Output Locations: *

    Lastly, we specify the output locations. For this tutorial, we will request flow and stage at the two boundaries, two locations along Channel 2, and the beginning of Channel 2004. These choices will be used to illustrate some points in a later tutorial when we look at Layering. Feel free to add anything that interests you.

      1. In\u00a0hydro.inp, create the skeleton OUTPUT_CHANNEL table using the following header:

      OUTPUT_CHANNEL NAME CHAN_NO DISTANCE VARIABLE INTERVAL PERIOD_OP FILE [data will go here] END

      1. The output request rows may be found in the file output_channel_tutorial.inp. Copy them into\u00a0hydro.inp.
      2. Save and close the\u00a0hydro.inp\u00a0file.
  • In QUAL, add the Scalar Runtime information:

      1. The file\u00a0qual.inp\u00a0already has a SCALAR section. Add the following run time and temporary directory SCALARS above the others:

      SCALAR NAME VALUE run_start_date 02JAN1992 run_end_date 01MAR1992 run_start_time 0000 run_end_time 0000 temp_dir c:/temp [Existing scalars] END

  • *In QUAL, set the Boundary Concentration information: *

      1. Boundary conditions in QUAL for the constituent\u00a0ec\u00a0are specified in the NODE_CONCENTRATION table:

      NODE_CONCENTRATION NAME NODE_NO VARIABLE FILLIN FILE PATH END

    The names of the inputs must be EXACTLY the same as given in hydro \u2013 this is how input concentrations are matched with input flows.

      1. In the\u00a0Node Concentration\u00a0table, add an upstream concentration row. The name for this boundary condition must match the corresponding boundary in hydro \u2013 this name-matching is how flows and concentrations are paired. See section 2.b for the NAME used in this tutorial and Figure 1 for the node numbers. In the new row, enter the following information into the appropriate fields: 1. 1. Input Name:\u00a0upstream_flow. 2. Node:\u00a01 3. Variable:\u00a0ec 4. Fillin:\u00a0last 5. \u00a0The period after the value is to indicate it is not an integer.Input File:\u00a0constant 6. Path/Value:\u00a0200

        \u00a0DSM2 does not care what units are used for constituent concentrations, but all concentrations must be in the same units. For\u00a0ec, uS/cm are typically used.

      1. In the\u00a0Node_Concentration\u00a0table in\u00a0qual.inp, add a downstream boundary concentration row. The downstream concentration is going to be higher than the upstream one since we are later going to turn this into a tidal boundary in a later tutorial. Enter the following information into the next row of the table: 1. 1. Input Name:\u00a0downstream_stage. 2. Node:\u00a07 3. Variable:\u00a0ec 4. Fillin:\u00a0last 5. Input File:\u00a0constant 6. Path:\u00a030000
      2. Save the current settings.
  • *In QUAL, Specify Output Locations: *

    In QUAL, you can request\u00a0 1) concentration data, 2) concentration data with source tracking or 3) flow and stage data (which can be confusing if not output at the model time step). In this tutorial, our requests will include\u00a0ec\u00a0at the two boundaries, two locations along Channel 2, and the beginning of Channel 2004.

      1. In\u00a0qual.inp, create a QUAL Output table:
      2. In the\u00a0OUTPUT_CHANNEL\u00a0table, add the following lines:

      OUTPUT_CHANNEL NAME CHAN_NO DISTANCE VARIABLE INTERVAL PERIOD_OP FILE bnd_1 1 0 ec 15min inst ${QUALOUTDSSFILE} bnd_6 6 length ec 15min inst ${QUALOUTDSSFILE} chan2_half 2 7500 ec 15min inst ${QUALOUTDSSFILE} chan2_length 2 length ec 15min inst ${QUALOUTDSSFILE} chan2004 2004 0 ec 15min inst ${QUALOUTDSSFILE} END

      1. Save and close the file.
  • *Running HYDRO and QUAL *

    DSM2v8 runs hydro and qual sequentially. The hydrodynamic data from the hydro run is an input to the qual simulation.

    \u00a0Hydro can be run without qual, but can qual be run without hydro?\u00a0 \u00a0The DSM2 tutorials assume that you have activated Microsoft's power tool\u00a0Open Command Window Here. To get this and other recommended 3rd\u00a0party extras for DSM2, go to the\u00a0Recommended Third Party Extras\u00a0section of the DSM2 documentation by clicking on the START menu and selecting\u00a0START MENU -> Programs -> DSM2_v8 -> DSM2_documentation. If you do not want to install the\u00a0Open Command Window Here\u00a0tool, then you can use a command shell and change directories to the indicated directory. To open a command shell, click on the START menu and select\u00a0Run. In the box type\u00a0cmd\u00a0if it does not come up as the default. Click on OK.

      1. In Windows Explorer, navigate to the directory: _

        Unknown macro: {DSM2_home}tutorialsimple{_}. 2. shift+Right-click on the directory,\u00a0t1_channels, and select\u00a0Open Command Window Here. \">

      1. In the command window, type:\u00a0hydro hydro.inp\u00a0and press enter.

    Note that several lines will appear in the command window very quickly. There may then be a delay while data is processed. Then \"Starting hydro computations for time X\" will appear. A successful model run is completed after a \"Normal program end\" statement and the command prompt returns.

      1. HYDRO will then run (it may take a few minutes) and create an\u00a0output.dss\u00a0file in the same directory.
      2. To run QUAL, in the command window, type:\u00a0qual qual.inp.
      3. QUAL will then run and add output to the\u00a0output.dss\u00a0file. A successful qual run will produce a \"Normal program end\" statement and return to the command prompt. Qual takes longer to run than hydro did.
      4. Open the\u00a0output.dss\u00a0file and examine the results.
  • *CSDP style cross-sections *

    You can also run the model using cross-sections in the CSDP format. This is the form most familiar to DSM2 users. Mixing CSDP format with other formats may produce unpredictable results. \u00a0Two caveats. First, there are no\u00a0rectangular cross-sections\u00a0in Version 8. The rectangular and irregular cross-sections in Version 6 were not consistent: a regular cross-section and its equivalent representation in the irregular format did not give the same result. The discrepancy was due to different interpolation rules. In Version 8, we have dropped the \"irregular\" nomenclature because this is the only kind of cross section we support. The practical consequence of the change is that you are going to need a cross-section for every channel, and to get this you will need a data set targeted at Version 8. The Version 8 cross sections for the Delta are provided in the advanced tutorials. In the tutorial, you will find that the CSDP version of the cross sections are represented in two files: xsect_a.txt and xsect_b.txt. Recall that earlier in the tutorial the single file format cross sections were specified in the\u00a0hydro.inp\u00a0file. Now we will create a new launch file called hydro_csdp.inp that is going to reference the text files instead of listing the data explicitly.

      1. Copy\u00a0hydro.inp\u00a0to\u00a0hydro_csdp.inp \u00a0\u2013 it doesn't matter what you name the file, but don't skip this step
      2. In\u00a0hydro_csdp.inp, erase the XSECT_LAYER table and replace it with the following XSECT table that will point to the cross-section files.

      XSECT CHAN_NO DIST FILE 1 0.5 xsect_a.txt [other xsects go here] END

    When running DSM2v8, use either the Version 8 format (XSECT_LAYER from section 3 in this tutorial) or use the CSDP format presented in this part of the tutorial. DO NOT MIX AND MATCH IN ONE FILE.

      1. Create the table using the same channel-distance combinations as we used before. Use cross-sections A and B as designated in Figure 1.
      2. In the IO_FILE table, change the name of the echoed output file to hydro_echo_csdp.inp. As a bonus exercise you could change the environmental variables to accomplish nearly the same thing.
  • *Rerun HYDRO and compare cross-section formats *

    Now we want to run hydro with the alternate input from CSDP. To verify that we get the same cross-sections using the CSDP format, we are going to scrutinize the echo input file.

      1. Open the echoed input file from your first run. The file name is\u00a0channel_hydro_echo.inp. Do a search for XSECT_LAYER. This file echoes the input used on your previous run, and is what we are trying to match.
      2. Rerun hydro using the command:

    hydro hydro_csdp.inp

      1. Compare the echoed cross-sections to those in\u00a0hydro_echo_csdp.inp.\u00a0Use your text editor or a \"diff\" tool.
  • *Run HYDRO using echoed input. *

    Finally, let's take a look at the echoed output file and verify that it is an exact one-file replica of the *.inp data that went into the run. This is a powerful archiving option.

      1. Rerun hydro using\u00a0hydro.inp.
      2. Open\u00a0channel_hydro_echo.inp.
      3. Locate the IO_FILE section and change the name of the echoed input file (first entry) to echo_echo.inp.
      4. Save and close channel_hydro_echo.inp
      5. Run the model using\u00a0channel_hydro_echo.inp. At a command prompt type:

    hydro channel_hydro_echo.inp

    1. Compare the output from your first run (channel_hydro_echo.inp) to the second run (echo_echo.inp). Are they the same?\u00a0

  1. Brain teasers
    1. What is the actual delta-x between computational points for each of the subreaches (channels 1-6)?
    2. (Advanced \u2013 for hydrodynamics people) Why is the requested dx the minimum spatial step for each reach? Isn't finer better? Wouldn't you want to impose a maximum on how big dx can be?
    3. Change the bottom elevation of one of the cross-sections in the tutorial by lowering it 5ft. Do not alter the other vertical layers in the cross-section. For a typical water surface you will not be altering the properties of the cross-section. Can you think of two ways you\u00a0are\u00a0changing the simulation? Are they both \"real\"? What are the implications for representing a dredged channel in a study?
"},{"location":"tutorials/Tutorial_2_Reservoirs_Gates_Transfers/","title":"Tutorial 2: Reservoirs, Gates, Transfers","text":"

Task Add reservoirs, gates and object to object flow transfers to the simple channel grid created in tutorial 1\u00a0 Skills Gained

  • Understanding of how reservoirs and gates are represented in DSM2
  • Learn how to transfer flow from one reservoir or node to another reservoir or node in DSM2 The purpose of this tutorial is to learn how to add reservoirs, gates, and flow transfers to the simple channel-only grid created in Tutorial 1 (Figure 1). The grid we are going to create has the following configuration and specifications: The channel portion is identical to the simple channel model from Tutorial 1. Note that each tutorial is self contained, so it is not necessary to do Tutorial 1 before completing this tutorial. Figure 1- Simple channel with a new reservoir, gate, and flow transfer. The following steps will instruct you on how to create these new features and add them to the simple channel system.\u00a0

    DSM2 Definitions Reservoir In DSM2, reservoirs are open bodies of water that store flow and are connected to nodes by means of an energy-based equation. This means that flow moves between the reservoir and its connected node or channel whenever there is an energy imbalance (e.g. stage difference). Reservoirs are considered instantly well-mixed. The Reservoirs Table specifies the identity and physical properties of the reservoir. Connections to nodes are specified in the Reservoir Connections table. If it is desired to regulate flow between a reservoir and its connected node or channel, a gate device is used.\u00a0 In DSM2 applications for the Delta, reservoirs are used for actual reservoirs such as Clifton Court Forebay and for open water bodies such as flooded islands.\u00a0 Gate In DSM2, gates are sites that present a barrier or control on flow. A gate may have an arbitrary number of associated hydraulic devices (pipes and weirs), each of which may be operated independently to control flow.\u00a0 In DSM2 applications for the Delta, gates are used to represent the Delta Cross Channel, the Montezuma Slough Salinity Control Gates, and permanent or temporary barriers.\u00a0 Object to Object Flow Transfer Transfers are direct water connections from a reservoir or node to another reservoir or node. Transfers are instantaneous movements of water (and its constituents and particles) without any detailed description of physics or storage. The Transfer table specifies the connectivity of the transfer.\u00a0 In DSM2 applications for the Delta, object to object transfers have been used to represent proposed peripheral canal withdrawal and outflow locations.\u00a0

  • Create the reservoir:

    1. In Windows Explorer, navigate to the directory:\u00a0\\{DSM2_home}\\tutorial\\simple\\t2_reservoir_gate_transfer.
    2. Open\u00a0hydro.inp.\u00a0At the bottom of the file, Add the skeleton for the reservoir table:

RESERVOIR NAME AREA BOT_ELEV END

    1. Enter the following values into the appropriate fields: 1. Name:\u00a0res_1 2. Area (million sq ft):\u00a040 3. Bottom elev (ft):\u00a0-24
    2. Note from Figure 1 that the reservoir has two connections; one at Node 3, and one at Node 4. These will go in a child table called RESERVOIR_CONNECTION. Some DSM2 input data tables are related to each other in what is referred to as a parent/child relationship. In the case of reservoirs, the RESERVOIR table is the parent table and the RESERVOIR_CONNECTIONS table is the child table that provides additional information related to the information in the parent table. The parent table must appear in the input file prior to the child table. The header has the following form:

RESERVOIR_CONNECTION RES_NAME NODE COEF_IN COEF_OUT END

    1. Enter the following values into the appropriate fields for the first connection: 1. Res Name: res_1 2. Node:\u00a03 3. Res Coef (in):\u00a0200 4. Res Coef (out):\u00a0200
    2. Enter the following values into the appropriate fields for the second connection:
      1. Res Name: res_1
      2. Node:\u00a04
      3. Res Coef (in):\u00a0200
      4. Res Coef (out):\u00a0200
    3. Save the current settings.

\u00a0To ensure conservation of mass at the beginning of a DSM2 simulation, it is good practice to set appropriate initial conditions. It is recommended to set all flows to zero and reservoir stage to zero.

  1. Add Initial Conditions for the Reservoir:
    1. Create the\u00a0Reservoir Initial Conditions table:
      1. The header and data are

RESERVOIR_IC RES_NAME STAGE res_1 0.0 END\u00a0

  1. Create the Gate:
    1. Now we are going to create the GATE table and its child table GATE_DEVICE. Note from Figure 1 that the gate is located at Node 2 of Channel 2. This gate consists of both a weir and a pipe. Therefore, two rows of information will be needed for the\u00a0GATE_DEVICE\u00a0table.
    2. At the bottom of hydro.inp, add the skeleton for the GATE table:

GATE NAME FROM_OBJ FROM_IDENTIFIER TO_NODE END

    1. In the\u00a0Gates table: 1. Add a row and enter the following values into the appropriate fields: 1. Name:\u00a0gate_1 2. From object:\u00a0channel 3. From identifier:\u00a02 [note that this 2 refers to channel 2] 4. to Node:\u00a02 [note that this 2 refers to node 2] 2. Create a GATE_WEIR_DEVICE table with the following fields:

GATE_NAME, DEVICE, NDUPLICATE, WIDTH, ELEV, HEIGHT, CF_FROM_NODE, CF_TO_NODE, DEFAULT_OP

  1. Enter the following values into the appropriate fields:

    1. Gate Name: gate_1
    2. Device:\u00a0weir
    3. NDuplicate:\u00a02
    4. Width:\u00a020
    5. Elev:\u00a02
    6. Height:\u00a09999.0
    7. CF from Node:\u00a00.8
    8. CF to Node:\u00a00.8
    9. Default Op:\u00a0gate_open. Note: don't forget to close your table with END. How many weirs does this gate have? Hint: check out the value for number of duplicates
  2. Create a GATE_PIPE_DEVICE table by looking up the appropriate headers in the DSM2 documentation. All table headers have to be in capital letters.

    1. Again, in the\u00a0Gate Devices\u00a0table:

      1. On a new line enter the following values into the appropriate fields:
        1. Gate Name: gate_1
        2. Device Name:\u00a0pipe
        3. Number of duplicates:\u00a02
        4. Radius:\u00a02
        5. Elevation:\u00a02
        6. Flow coefficient from Node:\u00a00.8
        7. Flow coefficient to Node:\u00a00.8
        8. Default Operation:\u00a0gate_open
    2. Save the current settings.\u00a0\u00a0 How would you change the gate device table to only allow flow in one direction? Hint: review gate operation options in the documentation.

  3. Create the Transfer:

A transfer is a momentum-free transfer of water from one node or reservoir to another node or reservoir. We are going to create a continuous transfer of 40cfs of water from the reservoir res_1 to node 6.

  1. Below the gate input, create the\u00a0TRANSFER table
    1. The headers are:

TRANSFER NAME FROM_OBJ FROM_IDENTIFIER TO_OBJ TO_IDENTIFIER END

  1. Enter the following values into the appropriate fields: 1. Name:\u00a0transfer_1 2. From Object:\u00a0reservoir 3. From identifier:\u00a0res_1 4. To Object:\u00a0node 5. To identifier:\u00a06

    1. Save the current settings.
  2. Add the Transfer Flow Time Series:

We have created the transfer physically, but we have not assigned it a flow. This is done on a separate table, so that the specifications of the transfer can be used with different operations or hydrologies. Flow will be 40cfs.

  1. In hydro.inp, create the\u00a0Transfer Time Series\u00a0table:
    1. The headers are:

INPUT_TRANSFER_FLOW TRANSFER_NAME FILLIN FILE PATH END

  1. Enter the following values into the appropriate fields: 1. Input Name:\u00a0transfer_1 2. Fillin:\u00a0last 3. Input File:\u00a0constant 4. Path/Value:\u00a040

    1. Save the current settings.

\u00a0How would you change the flow transfer from a constant value to a time varying value? Note: the values shown in the last two columns are descriptions of the information that would go in that field; they are not actual field values. See Basic Tutorial 4 for more information on using time series data in DSM2.\u00a0

  1. Running HYDRO and QUAL

    1. In Windows Explorer, navigate to the directory: _

      Unknown macro: {DSM2_home}tutorialsimple{_}. 2. Right-click on the directory,\u00a0t2_reservoir_gate_transfer, and select\u00a0Open Command Window Here. 3. In the command window, type:\u00a0hydro hydro.inp. 4. In the command window, type:\u00a0qual qual.inp. 5. Open the\u00a0output.dss\u00a0file in the\u00a0t2_reservoir_gate_transfer\u00a0directory, and examine\u00a0 the results.

  2. Brain teasers

    1. The equation for inflow from a node to a reservoir through a gate is as follows:\u00a0\u00a0

      Lookup the equation for a reservoir connection in the documentation. Write it down next to the gate equation. Assuming they both represent the same basic orifice physics, what terms in the gate equation does the reservoir coefficient C lump together? 2. Clifton Court Forebay has five duplicate radial gates connecting it to the surrounding channel. Each has a crest elevation of -10.1ft and a width of 20ft: 1. If water is at 0 ft and the five gates are open, what is the area exposed to flow? 2. If the weirs are perfectly efficient (no loss, coefficients of 1.0), what would be the equivalent \"lumped\" reservoir coefficient for these gates? 3. DSM2 version 6 had a calibrated reservoir coefficient of 1800^\uf02a^ Note that the value of 1800 is the DSM2v6 value of 2400 adjusted so that it matches the reservoir equation and v8. In version 6 the coefficient was multiplied by an undocumented 0.75 factor. DSM2v8 uses the reservoir coefficient as specified.. Was this value physical given the assumptions of the model? What alternate value might you use? Why might the version 6 value have been acceptable \u2013 are there explanations having to do with the model assumptions?

"},{"location":"tutorials/Tutorial_3_Layering/","title":"Tutorial 3: Layering","text":"

Task

  • Separate DSM2 input data into multiple input files
  • Use layers in DSM2 to group related items

Skills Gained Learn how to use layering in DSM2 to add, change and delete features in a DSM2 simulation, for example including a new reservoir in a simulation

The purpose of this tutorial is to demonstrate the use of layering to structure your project. Layers are part of the DSM2 data management system. They allow input items to be grouped in logical bundles, and allow changes to be brought into an old simulation without erasing or altering archived items. At the same time we will neaten up our input by dividing it into several files that are \"included\" from a fairly sparse primary file. The layering concept will be demonstrated by adding a \"dummy\" reservoir connected to nodes 5 and 6 (Figure 1) that will be \"turned on\" or \"turned off\" in a simulation. We will also use DSM2MODIFIER to differentiate between alternative simulations.

Figure 1: Simple channel with a reservoir, gate, flow transfer and dummy reservoir.

  1. Convert the previous hydro.inp GRID items to external files

In order to use layers, the input tables have to be gathered into individual input files.\u00a0 \u00a0Key points about layering:

  • Each file represents a layer
  • Information in the launch file (hydro.inp\u00a0or\u00a0qual.inp) supersedes all other input information.
  • For include blocks, files that are read later replace files that are read earlier, in other words, if the same type of input information exists in more than one file, the last information read will overwrite the previously read values.
  • Overriding values is based on an identifier (e.g.NAME or NAME and VARIABLE\u2014identifiers are listed in table reference documentation)
  • Parent and child tables (e.g. channel and xsect) must be grouped in the same file.
  • If a parent item is overridden, all of the child items associated with the overridden parent item are ignored.
    1. Move the channel and reservoir data:
      1. Navigate to the t3_layering directory.
      2. Create a new file in Notepad++ called\u00a0grid_tutorial_base.inp
      3. Open\u00a0hydro.inp.
      4. Locate the CHANNEL and XSECT_LAYER tables in\u00a0hydro.inp.\u00a0Cut\u00a0them and paste them into\u00a0grid_tutorial_base.inp.
      5. Locate the RESERVOIR and RESERVOIR_CONNECTION tables in\u00a0hydro.inp.\u00a0Cut\u00a0them and paste them into\u00a0grid_tutorial_base.inp\u00a0and save the file. Note: leave the RESERVOIR_IC in the\u00a0hydro.inp\u00a0file.
      6. Similarly move the TRANSFER and GATE information from\u00a0hydro.inp\u00a0to\u00a0grid_tutorial_base.inp.\u00a0 Be sure to move the GATE child tables too. Leave the INPUT_TRANSFER_FLOW table in the\u00a0hydro.inp\u00a0file.
      7. Make sure the data tables listed above have been removed from\u00a0hydro.inp.
      8. Now add these lines to\u00a0hydro.inp\u00a0that will tell DSM2 you want to include data from other files and that these files will contain GRID (channel, reservoir, transfer and gate) tables and their child tables. Add the GRID table after the IO_FILE block and before any of the initial condition blocks.

GRID grid_tutorial_base.inp END \u00a0Be sure that there is a carriage return at the end of each *.inp file.\u00a0

  1. Running HYDRO and QUAL with grid information in separate files

This simulation will serve as the base case for comparison for the other simulations run in this tutorial. We will use the DSM2MODIFIER to differentiate between the various simulations. DSM2MODIFIER is a special ENVVAR definition that is automatically used by DSM2 to mark output (the F Part of the DSS Path).

    1. In the ENVVAR section of\u00a0hydro.inp\u00a0and\u00a0qual.inp, change DSM2MODIFIER to layers_base and save the files.

    2. In Windows Explorer, navigate to the directory: \\{DSM2_home}\\tutorial\\simple\\t3_layering

      .

        1. Right-click on the directory,\u00a0t3_layering, and select\u00a0Open Command Window Here. Note: for computers running Vista, use a shift+right click on the directory name to get the Open Command Window.
        2. In the command window, type:\u00a0hydro hydro.inp.
        3. In the command window, type:\u00a0qual qual.inp.
        4. Note that many of the output files use the DSM2MODIFIER in their name, e.g.\u00a0layers_base.out. The\u00a0output.dss\u00a0file distinguishes between scenarios by using the DSM2MODIFIER in the F-Part. Open the\u00a0output.dss\u00a0file in the\u00a0t3_layering\u00a0directory, and examine the results.
      1. Creating a new reservoir:

      In this section, we will learn how to add a feature by adding a new reservoir. We don't want to mess too much with what we have already, so we are going to add a dummy reservoir in our grid_tutorial_base layer. Later in this tutorial, we will learn how to use layers to disable this feature as well.\u00a0 \u00a0The ability to mask and delete features such as reservoirs and gates in DSM2 is often used in planning runs to \"turn on\" and \"turn off\" features when studying planning alternatives.\u00a0

          1. Create a new Reservoir in\u00a0grid_tutorial_base 1. In grid_tutorial_base.inp, enter data for the new reservoir below the data for res_1 2. Name:\u00a0dummy_res 3. Area (million sq ft):\u00a060 4. Bottom elev (ft): -30
        1. In the\u00a0Reservoir Connection\u00a0table:

          1. Enter the following values into the appropriate fields:
            1. Reservoir name: dummy_res
            2. Node:\u00a05
            3. Res Coef (in):\u00a0220
            4. Res Coef (out):\u00a0220
        2. Again, in the\u00a0Reservoir Connection\u00a0table:

          1. Enter the following values into the appropriate fields:
            1. Reservoir name: dummy_res
            2. Node:\u00a06
            3. Res Coef (in):\u00a0220
            4. Res Coef (out):\u00a0220
        3. Save the current settings.

      1. Running HYDRO and QUAL with the new reservoir

      This simulation is our first alternative which adds a reservoir. We will use the DSM2MODIFIER to differentiate this simulation from the base simulation.

        1. In the ENVVAR section of\u00a0hydro.inp\u00a0and\u00a0qual.inp, change DSM2MODIFIER to layers_dummyres and save the files.
        2. In Windows Explorer, navigate to the directory: _

      .

    3. Right-click on the directory,\u00a0t3_layering, and select\u00a0Open Command Window Here. For Vista shift+right click on directory to get the Open Command Window.

    4. In the command window, type:\u00a0hydro hydro.inp.

    5. In the command window, type:\u00a0qual qual.inp.

    6. Note that many of the output files use the DSM2MODIFIER in their name, e.g.\u00a0layers_dummyres.out. Compare the\u00a0layers_base.out\u00a0and the\u00a0layers_dummyres.out\u00a0echoed input files to make sure that the dummy reservoir was included in the simulation. The\u00a0output.dss\u00a0file distinguishes between scenarios by using the DSM2MODIFIER in the F-Part. Open the\u00a0output.dss\u00a0file in the\u00a0t3_layering\u00a0directory, and look for results from the base run and from the new dummyres simulation.

  1. Disabling a reservoir using a revision layer

In this step of the tutorial, we will disable (remove) the dummy reservoir from the simulation using a revision layer. Revision layers allow the user to add or remove features for alternatives without altering the base input files.

    1. Create a Reservoir Revision Layer: 1. Create a file called\u00a0grid_tutorial_revision.inp.\u00a0Add this file to your GRID include-file section in\u00a0hydro.inp, which will now look like this:

GRID grid_tutorial_base.inp grid_tutorial_revision.inp END \u00a0The include files will be prioritized in the order they are read, later files replacing earlier ones. In this example, the information in\u00a0grid_tutorial_revision.inp\u00a0has priority over\u00a0grid_tutorial_base.inp, thus any duplicate information in\u00a0grid_tutorial_revision.inp\u00a0will override the information in\u00a0grid_tutorial_base.inp. When a parent table identifier (usually a channel/node number or a \"name\") is overridden by a later file, its original data (including child tables) will be ignored. Everything will come from the higher priority layer.

      1. Copy the reservoir table header and dummy reservoir data from\u00a0grid_tutorial_base.inp\u00a0to\u00a0grid_tutorial_revision.inp. It is important to copy both the parent (RESERVOIR) and the child (RESERVOIR_CONNECTION) tables into the revision layer. 2. Add a carat (^ shift and 6 key) before the reservoir name in the parent table. Your entry should look like this:

RESERVOIR NAME AREA BOT_ELEV ^dummy_res 60.0 -30.0 END\u00a0 RESERVOIR_CONNECTION RES_NAME NODE COEF_IN COEF_OUT dummy_res 5 220.0 220.0 dummy_res 6 220.0 220.0 END

    1. By overriding the name \"dummy_res\" and also marking it unused, you have now effectively removed dummy_res from the calculations. The child table is automatically ignored as well (so in a sense the entries there are unnecessary).\u00a0\u00a0What is the difference between commenting out \"dummy_res\" in the revision layer and using a carat (^) in the revision layer? Answer:\u00a0Commenting out the reservoir in the revision layer will be like the revision never existed and the information from the original grid layer will be used in the simulation. Using the carat (^) will \"turn off\" that reservoir for the simulation. Neither the information in the original grid layer nor the information in the revision layer will be used in that simulation. Thus using the carat is a way to \"turn on or off\" alternative components.
    2. Save the current settings.
  1. Running HYDRO and QUAL disabling the new reservoir

    1. In the ENVVAR section of\u00a0hydro.inp\u00a0and\u00a0qual.inp, change DSM2MODIFIER to layers_nodummyres and save the files.

    2. In Windows Explorer, navigate to the directory: \\{DSM2_home}\\tutorial\\simple\\t3_layering

      .

        1. Right-click on the directory,\u00a0t3_layering, and select\u00a0Open Command Window Here. For Vista shift+right click on directory to get the Open Command Window.
        2. In the command window, type:\u00a0hydro hydro.inp.
        3. In the command window, type:\u00a0qual qual.inp.
        4. Compare the\u00a0layers_base.out,\u00a0layers_dummyres.out, and layers_nodummyres.out\u00a0echoed input files and the\u00a0output.dss\u00a0file. Are the results the same for the base simulation and the no dummy reservoir simulation?
      1. Changing the properties of a reservoir

      This part of the tutorial demonstrates how a revision layer can be used to change the properties of a simulation. In this case the area of reservoir 1 is increased.

        1. Altering the Properties of the Original Reservoir res_1: 1. In the\u00a0Reservoirs\u00a0table of grid_tutorial_revision.inp, change the\u00a0Area (million sq ft)\u00a0field of res_1 from\u00a040\u00a0to\u00a050. 2. Copy the RESERVOIR_CONNECTION entries for res_1 from grid_tutorial_base to grid_tutorial_revision. The revision layer should look similar to the one below.

      RESERVOIR NAME AREA BOT_ELEV\u00a0 res_1 50.0 -24.0\u00a0 ^dummy_res 60.0 -30.0 END\u00a0 RESERVOIR_CONNECTION RES_NAME NODE COEF_IN COEF_OUT res_1 3 200.0 200.0\u00a0 res_1 4 200.0 200.0 dummy_res 5 220.0 220.0\u00a0 dummy_res 6 220.0 220.0\u00a0 END\u00a0\u00a0

      Why is it necessary to copy the reservoir connection entries to the revision file? Answer:\u00a0When you override a layer (file) with another entry in a parent table that has the same identifier, you COMPLETELY replace that item in the new layer including child items. In other words, if the child table-RESERVOIR_CONNECTIONS in this case-is not included in the revision layer, the reservoir will have no connections. The values in the original grid layer will not be read.

        1. Save the current settings.
      1. Running HYDRO and QUAL with increased area for reservoir 1

        1. In the ENVVAR section of\u00a0hydro.inp\u00a0and\u00a0qual.inp, change DSM2MODIFIER to layers_larger_res1 and save the file.
        2. In Windows Explorer, navigate to the directory: _

      .

    3. Right-click on the directory,\u00a0t3_layering, and select\u00a0Open Command Window Here. For Vista shift+right click on directory to get the Open Command Window.

    4. In the command window, type:\u00a0hydro hydro.inp.

    5. In the command window, type:\u00a0qual qual.inp.

    6. Compare the output to the earlier simulations.

  2. Changing the name of Channel 2004:

In this step, we will replace the channel number of Channel 2004. In this case, what we are changing is the identifier itself, rather than the parameters and data. So what we will do is delete Channel 2004 and put in a Channel 4 that is identical. In the process, we will ignore this change in other parts of the input and see what happens to initial conditions and output requests that reference a non-existent channel.

    1. Keep the\u00a0grid_tutorial_revision\u00a0file open.
    2. Copy the channel and xsect data from\u00a0grid_tutorial_base.inp\u00a0to the beginning of\u00a0grid_tutorial_revision.inp. Keep only channel 2004.
    3. In\u00a0grid_tutorial_revision.inp\u00a0in the CHANNEL and XSECT tables, copy the data for Channel 2004 and paste another copy into those tables.
    4. In one of your two copies of channel 2004, change the channel number in both tables to 4.
    5. Eliminate channel 2004 by prepending a carat in the CHANNEL table. Your revision should look like this:

CHANNEL CHAN_NO LENGTH MANNING DISPERSION UPNODE DOWNNODE 4 15000 0.035 0.3 4 5\u00a0 ^2004 15000 0.035 0.3 4 5\u00a0 END\u00a0

XSECT_LAYER CHAN_NO DIST ELEV AREA WIDTH WET_PERIM 4 0.5 -24.0 0.0 40.0 40.0\u00a0 4 0.5 0.0 960.0 80.0 91.22\u00a0 4 0.5 20.0 2640.0 160.0 133.6\u00a0 2004 0.5 -24.0 0.0 40.0 40.0\u00a0 2004 0.5 0.0 960.0 80.0 91.22\u00a0 2004 0.5 20.0 2640.0 160.0 133.6\u00a0 END\u00a0

    1. Save your work. Note that the entries in XSECT_LAYER for channel 2004 in the\u00a0grid_tutorial_revision.inp\u00a0are redundant since the channel was disabled. However it is good practice to always include full parent/child table groups in the revision layer so that choices can be turned \"on\" or \"off.\"
  1. Add Initial Conditions for the New\u00a0Channel 4:

Since there is no default initial condition for channel 4, we will have to add one. Similar to the other channels, we will use a zero flow initial condition.

    1. Create a file called\u00a0channel_ic_revision.inp.
    2. Copy the CHANNEL_IC table headers from\u00a0hydro.inp\u00a0to the new file.
    3. Create two rows of data for channel 4:

CHANNEL_IC CHAN_NO DISTANCE STAGE FLOW 4 0 0.0 0.0 4 length 0.0 0.0 END\u00a0

    1. In the\u00a0hydro.inp\u00a0file create an INITIAL_CONDITION include block underneath the GRID include block:

INITIAL_CONDITION channel_ic_revision.inp END

    1. Now every channel has an initial condition. Do you need to do something about the \"extra\" initial condition for Channel 2004? Try and see.
  1. Running HYDRO and QUAL

    1. In the ENVVAR section of\u00a0hydro.inp\u00a0and\u00a0qual.inp, change DSM2MODIFIER to layers_ch2004_to_ch4 and save the files.

    2. In Windows Explorer, navigate to the directory: \\{DSM2_home}\\tutorial\\simple\\t3_layering

      .

        1. Right-click on the directory,\u00a0t3_layering, and select\u00a0Open Command Window Here.
        2. In the command window, type:\u00a0hydro hydro.inp.
        3. In the command window, type:\u00a0qual qual.inp.
        4. Open the\u00a0output.dss\u00a0file in the\u00a0t3_layering\u00a0directory, and examine the results.
        5. Open\u00a0layers_ch2004_to_ch4_hydro_echo.inp. This is an \"echoed input\" that replicates your input verbatim, except ENVVAR replacements have been made and all the channel xsects are in the one-file format. You should be able to run the model using this file as easily as with the original hydro.inp. Take a look and see:
          1. Did channel 4 get in the input?
          2. Did channel 2004? What does this mean?
        6. Look at the output.dss file. Did the output for channel 4 get included in the output file? If not, what would you change to get output for channel 4?

      \u00a0Only output specified in the input files is written to the output.dss file. However, output for all locations is recorded in the hdf5 *.h5 output file.\u00a0

      1. Converting hydro.inp to input blocks

      Now let's convert hydro.inp completely to include files except for the SCALAR and IO_FILE sections. In future tutorials, hydro and qual simulations will be organized this way. The file hydro.inp or qual.inp is usually reserved for scalar or input/output file designations.

        1. In the previous section of this tutorial, an INITIAL_CONDITION include block was created underneath the GRID include block. We will create an initial condition input file for the original initial conditions and include that file here. Add the file ic_tutorial.inp as the first line of the INITIAL_CONDITION include block. The\u00a0channel_ic_revision.inp\u00a0file was already included in this block in the previous section of this tutorial.

      INITIAL_CONDITION ic_tutorial.inp channel_ic_revision.inp END

        1. Create a file called ic_tutorial.inp
        2. Cut (not copy) the CHANNEL_IC and RESERVOIR_IC data from\u00a0hydro.inp\u00a0and paste it into this file.
        3. Create an include block called HYDRO_TIME_SERIES as follows, in\u00a0hydro.inp.

      HYDRO_TIME_SERIES input_boundary_hydro_tutorial.inp input_transfer_flow_tutorial.inp END

        1. Create a file called\u00a0input_boundary_hydro_tutorial.inp. Cut (not copy) the BOUNDARY_STAGE and BOUNDARY_FLOW input from\u00a0hydro.inp\u00a0to\u00a0input_boundary_hydro_tutorial.inp.
        2. Similarly, create a file called input_transfer_flow_tutorial.inp. Cut and paste the INPUT_TRANSFER_FLOW data into this file.
        3. Create an include block called OUTPUT_TIME_SERIES.

      OUTPUT_TIME_SERIES output_hydro_tutorial.inp END

        1. Similarly, create the file called output_hydro_tutorial.inp. Cut and paste the OUTPUT_CHANNEL data into this file.
        2. The remaining tutorials will use include blocks extensively for both hydro and qual.
        3. Save all of the files.
      1. Running HYDRO and QUAL with all include files

        1. In the ENVVAR section of\u00a0hydro.inp\u00a0and\u00a0qual.inp, change DSM2MODIFIER to layers_include_block
        2. In Windows Explorer, navigate to the directory: _

      .

    3. Right-click on the directory,\u00a0t3_layering, and select\u00a0Open Command Window Here.

    4. In the command window, type:\u00a0hydro hydro.inp.

    5. In the command window, type:\u00a0qual qual.inp.

    6. Open the\u00a0output.dss\u00a0file in the\u00a0t3_layering\u00a0directory, and examine the results, comparing it to the last run. Did putting things in input blocks change anything?

    7. Learning more

Overriding is easy to understand. The main things you will need to keep in mind are

  1. Understanding how child table replacement works:
    1. You can't replace the child element without replacing the parent.
    2. The children of an overridden parent element are never used.
  2. What is the unique identifier for each row in a table? In most cases this is the first field and it is usually a name or a map number (it is a label rather than a piece of hard data). In some cases (e.g. output), the unique identifier may be two fields such as NAME and VARIABLE for output. Overriding only occurs when the identifier for the row is duplicated. This information is available in the table reference documentation in the \"documentation\" folder.
  3. Which data can be included in which blocks. For instance, GRID can contain CHANNEL, GATE, RESERVOIR and TRANSFER data. This information is given in Table 1 on the next page.

  1. Brain Teaser
    1. For the same change in elevation between the reservoir and connecting node, which reservoir would have a higher flow, res_1 or dummy_res?

Table 1: Include Blocks for DSM2 Input Files

Include Block

Sections

CONFIGURATION\u00a0

ENVVAR\u00a0 SCALAR

GRID

CHANNEL\u00a0 XSECT (child)\u00a0 XSECT_LAYER (child)\u00a0 RESERVOIR\u00a0 RESERVOIR_CONNECTION (child)\u00a0 GATE\u00a0 GATE_WEIR_DEVICE (child)\u00a0 GATE_PIPE_DEVICE (child)\u00a0 TRANSFER

GROUPS

GROUP\u00a0 GROUP_MEMBER (child)

HYDRO_TIME_SERIES

INPUT_TRANSFER_FLOW\u00a0 INPUT_GATE\u00a0 BOUNDARY_STAGE\u00a0 BOUNDARY_FLOW\u00a0 SOURCE_FLOW\u00a0 SOURCE_FLOW_RESERVOIR

INITIAL_CONDITION

CHANNEL_IC\u00a0 RESERVOIR_IC\u00a0

OPERATION

OPERATING_RULE\u00a0 OPRULE_EXPRESSION\u00a0 OPRULE_TIME_SERIES

OUTPUT_TIME_SERIES

OUTPUT_CHANNEL\u00a0 OUTPUT_RESERVOIR\u00a0 OUTPUT_CHANNEL_SOURCE_TRACK\u00a0 OUTPUT_RESERVOIR_SOURCE_TRACK\u00a0 OUTPUT_GATE

PARTICLE

PARTICLE_INSERTION\u00a0 PARTICLE_FLUX_OUTPUT\u00a0 PARTICLE_GROUP_OUTPUT

QUAL_SPATIAL

RATE_COEFFICIENT

QUAL_TIME_SERIES

INPUT_CLIMATE\u00a0 NODE_CONCENTRATION\u00a0 RESERVOIR_CONCENTRATION

"},{"location":"tutorials/Tutorial_4_Time_Varying_Data/","title":"Tutorial 4: Time Varying Data","text":"

Task Convert the boundary conditions and gate operations from constants to time varying input data.\u00a0 Skills Gained

  • Learn about HEC-DSS as a time series data storage system
  • Learn how HEC-DSS path names are used to reference time series in DSM2 input files\u00a0 The purpose of this tutorial is to incorporate time-varying information into the model. In the previous sections, all boundary conditions and gate timings were set as constant, and no input files were needed. In this section, the model is set to read time-varying information stored in HEC-DSS files.\u00a0 \u00a0The U.S. Army Corps of Engineers' Hydrologic Engineering Center Data Storage System, or HEC-DSS, is a database system designed to efficiently store and retrieve scientific data that is typically sequential. Such data types include, but are not limited to, time series data, curve data, spatial-oriented gridded data, and others. The system was designed to make it easy for users and application programs to retrieve and store data.\u00a0 Data in HEC-DSS format can be viewed using special software including VISTA (DWR), or HEC-DSSVue. Each time series is described in the database using DSS Pathnames (see column headings in figure). For DSM2 the pathnames are typically used as follows:

    A-Part: Data Source B-Part: Location C-Part: Variable D-Part: Date range E-Part: Data frequency\u00a0 F-Part: Description (in the sample shown the F-Part is the CalSim run identifier. For more information see the HEC-DSS website. dummy_res Figure 1: Simple channel with a reservoir, gate, flow transfer and dummy reservoir.

  • Change the Transfer Flows to HEC-DSS input:

The constant transfer flow from the previous tutorials will be changed to a time series.

  1. Create a new file in Notepad++ or another text editor called input_hydro_ts_tutorial.inp
  2. In the new file, create the\u00a0TRANSFER_TIME_SERIES table:

INPUT_TRANSFER_FLOW TRANSFER_NAME FILLIN FILE PATH END

  1. Enter the following values into the appropriate fields: 1. Input Name:\u00a0transfer_1 2. Fillin:\u00a0linear 3. Input File:\u00a0${TUTORIALINPUT} 4. Path/Value:\u00a0/TUTORIAL/TRANSFER/FLOW//15MIN/CONSTANT/

\u00a0The HEC-DSS pathnames are referred to using forward slashes /A-Part/B-Part/C-Part/D-Part/E-Part/F-Part/ In the example above, the A-Part is Tutorial, the B-Part is TRANSFER, etc. and the D-Part isn't specified.\u00a0

  1. Open hydro.inp. The input file uses an ENVVAR reference as the filename, so add the definition of TUTORIALINPUT. At the same time, set DSM2MODIFIER to timevar_1:

ENVVAR NAME VALUE\u00a0 HYDROOUTDSSFILE output.dss\u00a0 DSM2MODIFIER timevar_1\u00a0 TUTORIALINPUT ../timeseries/tutorial.dss\u00a0 END\u00a0

  1. We are going to replace the existing time series with the new file, so make sure it is listed below the other files as follows.

HYDRO_TIME_SERIES input_boundary_hydro_tutorial.inp input_transfer_flow_tutorial.inp input_hydro_ts_tutorial.inp END

  1. Save the files.
  2. Open qual.inp and set DSM2MODIFIER to timevar_1 as well (hydro.inp and qual.inp must agree or the tidefile won't be found).

  3. Running HYDRO and QUAL

    1. In Windows Explorer, navigate to the directory: \\{DSM2_home}\\tutorial\\simple\\t4_timevar

      .

      1. Right-click on the directory,\u00a0t4_timevar, and select\u00a0Open Command Window Here.
      2. In the command window, type:\u00a0hydro hydro.inp. Examine timevar_1_hydro_echo.inp. Did the time series assignment get used?
      3. In the command window, type:\u00a0qual qual.inp.
      4. Open the\u00a0output.dss\u00a0file in the\u00a0t4_timevar\u00a0directory, and verify that the results are identical to the results from the previous tutorial (located in the\u00a0t3_layering\u00a0directory). Why is this?

      Adjust DSM2MODIFIER to represent a variant scenario:

      1. In Windows Explorer, navigate to the directory:\u00a0\\{DSM2_home}\\tutorial\\simple\\t4_timevar
      2. Open\u00a0hydro.inp\u00a0for editing.
      3. In the\u00a0ENVVAR\u00a0section, change the\u00a0DSM2MODIFIER\u00a0environment variable from\u00a0timevar_1\u00a0to\u00a0timevar_2.
      4. Open\u00a0qual.inp\u00a0for editing.
      5. In the\u00a0ENVVAR\u00a0section, change the\u00a0DSM2MODIFIER\u00a0environment variable from\u00a0timevar_1\u00a0to\u00a0timevar_2.

      6. Add Source information into HYDRO:

        1. In\u00a0input_hydro_ts_tutorial.inp, create the table for node sources:

      SOURCE_FLOW NAME NODE SIGN FILLIN FILE PATH END

      1. Enter the following values into the appropriate fields: 1. Name:\u00a0source1 2. Node:\u00a05 3. Input File:\u00a0${TUTORIALINPUT} 4. Path/Value:\u00a0/TUTORIAL/SOURCE/FLOW//15MIN/CONSTANT/ 5. Sign:\u00a01 6. Fillin:\u00a0linear
      2. Save the current settings.

      3. Add Corresponding Source information into QUAL: Create a file called\u00a0input_qual_ts_tutorial.inp.

        1. In input_qual_ts_tutorial.inp, create the NODE_CONCENTRATION table

      NODE_CONCENTRATION NAME NODE_NO VARIABLE FILLIN FILE PATH END\u00a0

      1. Enter the following values into the appropriate fields: 1. Input Name:\u00a0source1 2. Node:\u00a05 3. Variable:\u00a0ec 4. Input File:\u00a0${TUTORIALINPUT} 5. Path/Value:\u00a0/TUTORIAL/SOURCE/EC//15MIN/CONSTANT/ 6. Fillin:\u00a0last
      2. Add the ENVVAR definition for TUTORIALINPUT in qual.inp

      TUTORIALINPUT ../timeseries/tutorial.dss

      1. In qual.inp, make sure that the file gets used:

      QUAL_TIME_SERIES input_node_conc_tutorial.inp input_qual_ts_tutorial.inp END\u00a0

      1. Add Time-varying Tide Information for Downstream Boundary in HYDRO:
        1. Reopen\u00a0input_hydro_ts_tutorial.inp
        2. Create the\u00a0BOUNDARY_STAGE\u00a0table.

      BOUNDARY_STAGE NAME NODE FILLIN FILE PATH END

      1. In the\u00a0Boundary Stage table\u00a0enter the following values into the appropriate fields: 1. Input Name:\u00a0downstream_stage 2. Node:\u00a07 3. Input File:\u00a0${TUTORIALINPUT} 4. Path/Value:\u00a0/TUTORIAL/DOWNSTREAM/STAGE//15MIN/REALISTIC/ 5. Fillin:\u00a0linear

      2. Add Downstream Boundary in QUAL:

        1. Re-open\u00a0input_qual_ts_tutorial.inp.
        2. In the\u00a0Node Concentration table:
          1. Enter the following values into the appropriate fields:
            1. Input Name:\u00a0downstream_stage
            2. Node:\u00a07
            3. Variable: ec
            4. Input File:\u00a0${TUTORIALINPUT}
            5. Path/Value:\u00a0/TUTORIAL/DOWNSTREAM/EC//15MIN/REALISTIC/
            6. Fillin:\u00a0last
      3. Add a Gate Time Series to HYDRO:

      This gate time series will control the weir. The pipe is to be left open all the time (its default).

      1. Create a file for the gate input called\u00a0input_gate_tutorial.inp
      2. Create the\u00a0gate time series table\u00a0INPUT_GATE:
      3. In the table enter the following values into the appropriate fields:
        1. Gate:\u00a0gate_1
        2. Device:\u00a0weir
        3. Variable:\u00a0op_from_node
        4. Input File:\u00a0${TUTORIALINPUT}
        5. Path/Value:\u00a0/TUTORIAL/GATE/FLAP_OP//IR-YEAR/TIMEVAR/
        6. Fillin:\u00a0none\u00a0(Can you tell why fillin is \"none\" for this time series?)
      4. Add the include file to hydro.inp. The time series block should look as follows:

      HYDRO_TIME_SERIES input_boundary_hydro_tutorial.inp input_transfer_flow_tutorial.inp input_hydro_ts_tutorial.inp input_gate_tutorial.inp END

      1. Save the current settings.

      Running HYDRO and QUAL

      1. In Windows Explorer, navigate to the directory: _

      .

    2. Right-click on the directory,\u00a0t4_timevar, and select\u00a0Open Command Window Here.

    3. In the command window, type:\u00a0hydro hydro.inp.

    4. In the command window, type:\u00a0qual qual.inp.

    5. Open the\u00a0output.dss\u00a0file in the\u00a0t4_timevar\u00a0directory, and examine the results.

"},{"location":"tutorials/Tutorial_5_Advanced_Output_and_Source_Tracking/","title":"Tutorial 5: Advanced Output and Source Tracking","text":"

Task

  • Create boundary and source groups
  • Request output for constituent source tracking

Skills Gained Learn how to use advanced output options in DSM2 including source tracking

The purpose of this tutorial is to provide instruction on advanced output options in DSM2. Basic outputs include flow, stage and constituent concentrations at nodes and channel locations. Advanced outputs include creating output groups and source tracking. The first part of this tutorial involves modifications to the text input file, hydro.inp. We will add some outputs and also take a look at how data in hydro.inp is prioritized. The second part introduces the use of groups for source tracking. This tutorial uses the simple channel network shown in Figure 1. Figure 1: Simple channel with a reservoir, gate, flow transfer and dummy reservoir.

  1. Add Output Paths to hydro.inp:

In this step of the tutorial, we will request output upstream and downstream of the gate and reservoir 1.

    1. In Windows Explorer, navigate to the directory, \\{DSM2_home}\\tutorial\\simple\\t5_output.
    2. Open the file addin.inp and note the new output paths for the channels and reservoir.
    3. Copy the entire file contents to the clipboard.
    4. Open the file hydro.inp.
    5. Navigate to the bottom of the file and paste the information. Note that there are now two output requests for a location named bnd_1. In hydro.inp bnd_1 is defined as channel1 location 0 and in output_hydro_tutorial.inp it has been defined as channel 1 location 100.

For flow data at bnd_1, will the output be written at the upstream end of the channel (location 0) or 100ft downstream? Answer: The output will be for 100ft downstream because the output request in the launch file (e.g. hydro.inp or qual.inp) supersedes all other output requests that have the same identifier. In this case the identifier is the NAME and VARIABLE combination (e.g. bnd_1 and flow). How would you get output at channel 1 and both location 0 and location 100? Answer: Give each location a unique identifier, eg. bnd_1 and bnd_100.

  1. Add Boundary and Source Groups:

GROUPS are user-defined groups of model objects, for instance groups of water bodies or groups of boundary inputs. Groups are used a number of places in DSM2, including: tracking of constituents originated from grouped sources, tracking of particles as they reside or move between groups of water bodies and/or boundaries, and assignment of rate coefficients in QUAL to groups of water bodies. In the output specifications, groups are used to define aggregate sources for source tracking. For example, output groups could be used to track mass originating from all the boundaries, or from all Delta Island Consumptive Use (DICU) diversions, etc. In this section, we will create two output groups: boundary locations and water quality constituent source locations.

  1. In the study directory, create a file called group_tutorial.inp.
  2. In the group_tutorial.inp file, add a group table. Note that this is a parent table for overwriting/layering purposes. Define a boundary and a sources group:

GROUP NAME boundary sources END

  1. Now define the group members. Create the GROUP_MEMBER table below the GROUP table:

GROUP_MEMBER GROUP_NAME MEMBER_TYPE PATTERN END

  1. In the Group Members table: 1. Enter a row with the following values in the appropriate fields: 1. GROUP_NAME: boundary 2. MEMBER_TYPE: stage 3. PATTERN: .*stream.* 4. Note that the dot-star .* in the above pattern is a \"regular expression\" wildcard. You can use any standard Perl-style regular expression in groups, but the html documentation for GROUPS describes most of the patterns you can put in a GROUP_MEMBER that are really useful.

Look in the input_boundary_hydro_tutorial.inp file and determine what boundary conditions are part of the boundary group based on the member type \"stage\" and the pattern \".*stream.*\".

  1. Enter another row with the following values in the appropriate fields: 1. GROUP_NAME: boundary 2. MEMBER_TYPE: flow_boundary 3. PATTERN: .*stream.*

Look in the input_boundary_hydro_tutorial.inp file and determine what boundary conditions are part of the boundary group based on the member type \"flow_boundary\" and the pattern \".*stream.*\".

  1. In the Group Members table insert another row with the following values in the appropriate fields: 1. GROUP_NAME: sources 2. MEMBER_TYPE: source_sink 3. PATTERN: source1

Look in the various qual input files and determine which inputs will make up the sources group defined above.

  1. In the qual.inp file, create the GROUPS (note the plural) include block that will reference this file:

GROUPS group_tutorial.inp END

  1. Save the current settings.

  2. Source Tracking:

Source tracking (aka fingerprinting) determines the amount of water or of a constituent at one location that originated from a specified location. For constituent fingerprinting, 1) define a source group (e.g. boundaries or DICU locations), and then 2) request output for that group. For volumetric fingerprinting that indicates the percentage of flow that originated from each boundary location, 1) create a fingerprinting constituent and set its value equal to 100 at all boundaries, 2) define a source group for all boundaries, and 3) request output from that source group.

  1. Add Source Tracking Output for Channel 5:

To demonstrate source tracking, this part of the tutorial examines how much of the EC in channel 5 (see Figure 1) came from the boundaries and from other sources. For comparison purposes, the EC from all sources will also be output. Create a new file called output_qual_sourcetrack.inp.

    1. In this file, create an OUTPUT_CHANNEL_SOURCE_TRACK table. Refer to the documentation to create the header.
    2. In the Channel Output table create 3 rows:
      1. For the first new row, enter the following values into the appropriate fields:
        1. Name: ch5
        2. Channel: 5
        3. Distance: 5000
        4. Variable: ec
        5. Source Group: all (this will track ec from all sources)
        6. Output File: ${QUALOUTDSSFILE}
        7. Time Interval: 15min
        8. Period Op: inst
      2. For the second new row, enter the following values into the appropriate fields:
        1. Name: ch5
        2. Channel: 5
        3. Distance: 5000
        4. Variable: ec
        5. Source Group: boundary
        6. Output File: ${QUALOUTDSSFILE}
        7. Time Interval: 15min
        8. Period Op: inst
      3. For the third new row, enter the following values into the appropriate fields:
        1. Name: ch5
        2. Channel: 5
        3. Distance: 5000
        4. Variable: ec
        5. Source Group: sources
        6. Output File: ${QUALOUTDSSFILE}
        7. Time Interval: 15min
        8. Period Op: inst
    3. Save the current settings.
  1. Running HYDRO and QUAL

    1. Open a command window for the t5_output directory.
    2. In the command window, type: hydro hydro.inp.
    3. In the command window, type: qual qual.inp.
    4. Open the hydro echo file output_tutorial_hydro_echo.inp. Which version of bnd_1 got picked up by the model, the one in hydro.inp or the one in output_hydro_tutorial.inp.
    5. Open the output.dss file in the t5_output directory, and examine the results. Do a mass balance to make sure the source tracking adds up.

  1. Brain Teaser

How would you set up a source tracking simulation to determine what percentage of water/flow at a given location originated from a specified boundary?

"},{"location":"tutorials/Tutorial_6_Operating_Rules/","title":"Tutorial 6: Operating Rules","text":"

Task

  • Operate a gate based on stage criteria
  • Regulate a source/sink inflow

Skills Gained Get an introduction to operating rules

The purpose of this tutorial is to practice using Operating Rule Language (ORL) statements to set gate operations and flows. With operating rules, expressions can be crafted to steer the model on-the-fly; e.g., a gate can be directed to automatically close when stage conditions reach a certain threshold. In this tutorial we will create operating rules to operate a gate and to regulate a source/sink inflow. Extensive documentation on the DSM2 operating rules can be found at: START menu → Programs → DSM2_v8 → DSM2_documentation → Operating Rules

Figure 1: Simple channel with a reservoir, gate, flow transfer and dummy reservoir.

  1. Adding a Second Gate Where Op Rule Will Be Applied

In this step of the tutorial we will prepare a new layer and add the gate that will be manipulated by the op rule.

    1. In Windows Explorer, navigate to the directory, \\{DSM2_home}\\tutorial\\simple\\t6_oprule.
    2. Create a file grid_tutorial_opitems.inp.
    3. Open grid_tutorial_base.inp. We are going to copy items from this file into the new file with minor changes:

      1. Copy the GATE table with gate_1, paste it into grid_tutorial_opitems.inp and change the following fields:

        1. NAME: gate_2

        2. FROM_OBJ: channel

        3. FROM_IDENTIFIER: 5

        4. TO_NODE: 5

          1. In the Gate_Weir Devices table:
            1. Copy the data from gate_1 to grid_tutorial_opitems.inp, change the gate name to gate_2 and change the following fields:
        1. GATE_NAME: gate_2
        2. Elev: -2
        3. Save the current settings.
        4. Add grid_tutorial_opitems.inp to the list of included files in hydro.inp.
  1. Adding Output for the Second Gate:

    1. Create a file called output_oprule_tutorial.inp.
    2. Create the OUTPUT_GATE table:

OUTPUT_GATE NAME GATE_NAME DEVICE VARIABLE INTERVAL PERIOD_OP FILE END

    1. In the output table enter the following values into the appropriate fields: 1. 1. Output Name: gate_2_weirop 2. Gate name: gate_2 3. Device: weir 4. Variable: op-from-node 5. Time Interval: 15min 6. Period Op: inst 7. File: ${HYDROOUTDSSFILE}
    2. Add the following channel outputs in a new OUTPUT_CHANNEL table:

OUTPUT_CHANNEL NAME CHAN_NO DISTANCE VARIABLE INTERVAL PERIOD_OP FILE trigger_loc 4 7500 stage 15min inst ${HYDROOUTDSSFILE} ds_gate2 5 0 flow 15min inst ${HYDROOUTDSSFILE} END

    1. Add the output layer to the list of include files in hydro.inp and save your work.
  1. Create an Operating Rule to Close the Weir when Stage is Low:

Now we are ready to write the first operating rule. This rule closes the new gate we created during times where stage at a monitoring point is low. First we will define the rule in terms of an expression called stage_critical (the condition where stage violates a minimum) and op_applies (a seasonal condition that is True when we are controlling the gate for stage). In a later step we will define these variables.

    1. Create a file called oprule_tutorial.inp.
    2. Create the Operating Rules table:

OPERATING_RULE NAME ACTION TRIGGER END

  1. Enter the following values into the appropriate fields: 1. Name: weir_close 2. Action Definition: \"SET gate_op(gate=gate_2, device=weir, direction=from_node) TO CLOSE RAMP 30MIN\"

You must use quotes for inputs with spaces.

  1. Trigger Definition: \"stage_critical AND op_applies\"

    1. Create an OPERATION include block in hydro.inp and add the new file so that it will be used by DSM2-HYDRO.

OPERATION oprule_tutorial.inp END

  1. Save the current settings.

Note that the expressions stage_critical and op_applies will be created in a later step.

  1. Create an Operating Rule to Open the Weir when Stage is High:

As before, we will enter the rule to open the weir first in terms of the expressions stage_relax (a condition where stage is safely above a threshold where we can open the gate) and op_applies. In the next step we will define these expressions.

    1. In the Operating Rules table enter the following values into the appropriate fields: 1. Name: weir_open 2. Action Definition: \"SET gate_op(gate=gate_2, device=weir, direction=from_node) TO OPEN RAMP 30MIN\" 3. Trigger Definition: \"( stage_relax AND op_applies) OR NOT(op_applies)\"
    2. Save the current settings.
    3. In the hydro.inp file, add the following environmental variables and values into the ENVVAR section:

STAGE_CRITICAL 1.4 STAGE_RELAX 1.6

  1. Define Expressions used in the rule
    1. In the file oprule_tutorial.inp, create the OPRULE_EXPRESSION table:

OPRULE_EXPRESSION NAME DEFINITION END

      1. Enter the following values into the appropriate fields: 1. Name: op_applies 2. Definition: \"SEASON \\< 01FEB\" 2. Enter the following values into the appropriate fields. Don't forget quotes!! 1. Name: stage_critical 2. Definition: \"chan_stage(channel=4, dist=7500) \\< ${STAGE_CRITICAL}\" 3. Enter the following values into the appropriate fields: 1. Name: stage_relax 2. Definition: \"chan_stage(channel=4, dist=7500) > ${STAGE_RELAX}\"
    1. Save the current settings.

    2. Now run HYDRO and QUAL:

      1. Open a command window for the t6_oprule directory.
      2. In the command window, type: hydro hydro.inp.
      3. In the command window, type: qual qual.inp.
      4. Open the output.dss file in the t6_oprule directory, and examine the results.
  1. Add a Reduced Flow Operating Rule:

In our next operating rule, we will control the inflow to a node by having it toggle back and forth between a larger \"full flow\" and a \"reduced flow\". First we will enter the rule and then we will define the full and reduced flows.

    1. In the Operating Rules table enter the following values into the appropriate fields: 1. Name: flow_reduce 2. Action Definition: SET ext_flow(name=source1) TO ifelse(stage_critical,reduced_flow,full_flow) 3. Trigger Definition: TRUE
    2. Now create the expressions that define full_flow and reduced_flow. In the Oprule Expressions table:

      1. Enter the following values into the appropriate fields that define full_flow. This will involve the time series source_flow which we will enter later:

        1. Input Name: full_flow

        2. Definition: ts(name=source_flow) [note: this is a reference to a time series we haven't defined yet].

      2. Do the same for reduced_flow. Note: we are defining reduced_flow in terms of the time series. There is no guarantee of what order expressions will be evaluated, so you cannot safely define reduced_flow in terms of another expression such as full_flow. Enter the following values into the appropriate fields:

        1. Input Name: reduced_flow
        2. Definition: 0.5*ts(name=source_flow).
        3. Save the current settings.
      3. Now we will define the source_flow time series upon which the full_flow and reduced_flow expressions are based.
        1. Create the Operation Time Series table:

OPRULE_TIME_SERIES NAME FILLIN FILE PATH END

      1. Enter the following values into the appropriate fields: 1. Input Name: source_flow

        1. Input File: ${TUTORIALINPUT}

        2. Path: /TUTORIAL/SOURCE/FLOW//15MIN/CONSTANT/ [ Note: there are two forward slashes between FLOW and 15MIN]

        3. Fillin: none

    1. Save the current settings.

  1. Override the Expression op_applies:

Recall that op_applies is used to determine when the weir is operated. Previously the definition of this expression was seasonal: the expression was SEASON \\< 01FEB. The goal now is to make the same expression depend on a time series. Rather than change the expression, we will override it in a new layer.

    1. Add a new Operating Rules Layer: 1. Create a file called oprule_tutorial_revision.inp
    2. Redefine the expressions that define op_applies. In the Expressions table:

      1. Create the OPRULE_EXPRESSION table.
      2. Enter the following values into the appropriate fields:

        1. Input Name: op_applies

        2. Definition: \"ts(name=op_used)>0.0\" [note: this is a reference to a time series we will define in the next step].

    3. Define the time series op_used on which the op_applies expression depends. In the Operation Time Series table:

      1. Right-click and select Insert row.
      2. Enter the following values into the appropriate fields:
        1. Input Name: op_used
        2. Input File: ${TUTORIALINPUT}
        3. Path: /TUTORIAL/GATE/FLAP_OP//IR-YEAR/TIMEVAR/
        4. Fillin: none
    4. Add oprule_tutorial_revision.inp after oprule_tutorial.inp in the OPERATION block of hydro.inp so that it will be used by HYDRO.
    5. Run HYDRO and QUAL and examine the results.
"},{"location":"tutorials/Tutorial_6_Operating_Rules/#attachments","title":"Attachments:","text":"

worddavd56eb5f63f4c9181cb2a8632c8c6c562.png (image/png) worddave8e1df4e853bb46c4ee6f68afece040d.png (image/png)

"},{"location":"tutorials/Tutorials/","title":"Tutorials","text":"
  • An Introduction to DSM2 Tutorials
  • DSM2 Tutorial Overview
  • Tutorial 1: Channels
  • Tutorial 2: Reservoirs, Gates, Transfers
  • Tutorial 3: Layering
  • Tutorial 4: Time Varying Data
  • Tutorial 5: Advanced Output and Source Tracking
  • Tutorial 6: Operating Rules
  • DSM2 Bay-Delta Tutorial 1: Historical Simulation
  • DSM2 Bay-Delta Tutorial 2: Source Tracking (Fingerprinting)
  • DSM2 Bay-Delta Tutorial 3: Planning Simulation
  • DSM2 Bay-Delta Tutorial 4: Batch Preprocessing
  • DSM2 Bay-Delta Tutorial 5: Suisun Marsh Operating Rules
  • DSM2 Bay-Delta Tutorial 6: SDIP Permanent Gate Simulation
  • DSM2 Bay-Delta Tutorial 7: Clifton Court Diurnal Pumping
  • Delta Tutorial 8 - Temperature Simulation
  • Delta Tutorial 9 - DO Simulation
  • Post-processing
  • Presentations
  • CSDP Tutorial
  • Background Slide Material
  • Troubleshooting
  • DSM2 FAQ

The tutorials in PDF form are here\u00a0

Don't edit these. They are only here for reference. This should be removed once the tutorials are updated and correctly formatted.

Download All

The presentations from the class are here

"},{"location":"tutorials/Tutorials/#attachments","title":"Attachments:","text":"

DSM2 Overview.pdf (application/pdf) DeltaTutorial7-Diurnal Pumping CCFB.pdf (application/pdf) DeltaTutorial6-SDIP Op Rules.pdf (application/pdf) DeltaTutorial5-Marsh Op Rules.pdf (application/pdf) DeltaTutorial4-Batch.pdf (application/pdf) DeltaTutorial3-Planning.pdf (application/pdf) DeltaTutorial2-Source Tracking.pdf (application/pdf) DeltaTutorial1-Historical.pdf (application/pdf) BasicTutorial6-Oprule.pdf (application/pdf) BasicTutorial5-Output.pdf (application/pdf) BasicTutorial4-Timevar.pdf (application/pdf) BasicTutorial3-Layering.pdf (application/pdf) BasicTutorial2-Reservoir_Gate_Transfer.pdf (application/pdf) BasicTutorial1-Channels.pdf (application/pdf) An Introduction to DSM2 Tutorials.pdf (application/pdf)

"}]} \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml new file mode 100644 index 00000000..c4f1ac89 --- /dev/null +++ b/sitemap.xml @@ -0,0 +1,573 @@ + + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + 
None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + + None + 2023-10-18 + daily + + \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz new file mode 100644 index 00000000..3572eb77 Binary files /dev/null and b/sitemap.xml.gz differ diff 
--git a/tutorials/An_Introduction_to_DSM2_Tutorials/index.html b/tutorials/An_Introduction_to_DSM2_Tutorials/index.html new file mode 100644 index 00000000..ca162bc3 --- /dev/null +++ b/tutorials/An_Introduction_to_DSM2_Tutorials/index.html @@ -0,0 +1,530 @@ + + + + + + + + + + + + + + + + + + An Introduction to DSM2 Tutorials - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

An Introduction to DSM2 Tutorials

+

DSM2 Website

+

Official Website

+

Documentation

+

If DSM2 is installed on your computer, click on the START menu and select Programs → DSM2_v8 → DSM2_documentation

+

Introduction

+

Welcome to the Delta Simulation Model 2 (DSM2) Version 8 tutorial.

+

The tutorial is divided into two sets of lessons. The first set teaches +basic DSM2 skills using simplified channels. The second set of tutorials +explores more advanced DSM2 skills using the model application to the +Sacramento-San Joaquin Delta. The input files for these tutorials are in +the tutorial\simple and tutorial\historical directories +respectively.

+

The goal of the beginning tutorials (BasicTutorials 1-6, see Figure 1) +is to familiarize you with the DSM2 input system and fundamental +modeling capabilities. This six-part tutorial builds a model of a simple +channel system, with each part building in complexity from its +predecessor. It is recommended that the tutorials be completed in order, +but it is not necessary since the tutorials are self contained.

+

+

Figure 1: DSM2 Basic Tutorials

+

The goal of the Delta tutorials (DeltaTutorials 1-5, see Figure 2) is to +familiarize you with Delta specific DSM2 applications and tasks. In +addition a DSM2 Overview document has been provided that describes the +DSM2 modules (HYDRO, QUAL, and PTM) and their typical modes of +application (historical, real-time and planning).
+

+

Figure 2: DSM2 Delta Tutorials

+

{DSM2_home}

+

In working the tutorials, the directory where you installed DSM2 will be +referred to as {DSM2_home}. E.g., if you accepted the default install +directory, {DSM2_home} would be d:\delta\dsm2 (there may also be a +version number in the directory name).

+

The first tutorial is called Channels, and involves setting up the +channel grid, adding parameters, setting boundary conditions, and +listing output locations. The second tutorial is called Reservoir Gate +Transfer, and involves adding these components to the simple channel +system.

+

The third tutorial is called Layering. The section guides you through +the nuances of organizing data in multiple files. Layers are part of the +DSM2 data management system. They allow input items to be grouped in +logical bundles, and allow changes to be brought into an old simulation +without erasing or altering archived items.

+

The fourth tutorial is called Timevar, and demonstrates the addition +of time-varying information to the model. In the previous sections, all +boundary conditions and gate timings were set as constant, and no input +files were needed. In this section, the model is set to read +time-varying information stored in DSS files.

+

The fifth tutorial is called Output, and covers advanced output +options. The first part involves modifications to the text input file, +hydro.inp. The second part describes the use of groups and source +tracking in QUAL.

+

The sixth tutorial is called Oprule, and covers the use of Operating +Rule Language (ORL) statements to set gate operations. In the previous +versions of DSM2, the input text and time series files had to explicitly +state the operations of gates. With the operating rules, expressions can +be used to make the model operate gates on-the-fly. E.g., a gate can be +directed to automatically close when salinity conditions reach a certain +threshold.

+

There are two icons that are used to highlight information in the +tutorials.
+ +Indicates a DSM2 "gotcha" moment in which extra care may be necessary.
+ +Indicates a question to put your new found DSM2 knowledge to the test.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/DSM2_Bay-Delta_Tutorial_1_Historical_Simulation/index.html b/tutorials/DSM2_Bay-Delta_Tutorial_1_Historical_Simulation/index.html new file mode 100644 index 00000000..c5381f7a --- /dev/null +++ b/tutorials/DSM2_Bay-Delta_Tutorial_1_Historical_Simulation/index.html @@ -0,0 +1,911 @@ + + + + + + + + + + + + + + + + + + DSM2 Bay-Delta Tutorial 1: Historical Simulation - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 Bay-Delta Tutorial 1: Historical Simulation

+

Purpose:

+

This tutorial will demonstrate how to launch a basic run of the +historical HYDRO and QUAL simulations. You will also get practice using +the study templates that are distributed with DSM2, see how the +configuration file is used, make some changes in the output and learn +about the post-processing "transfer" script for averaging your output.

+

Except as part of a re-calibration, it is rare to make big changes in +the historical simulation. More commonly, you will want to add a few +output locations or scalars. Large scale policy or physical changes are +usually analyzed within a Planning simulation framework, covered in a +later tutorial.

+

HYDRO and QUAL

+
    +
  1. Copy the historical template:
      +
    1. In windows, copy the + folder \{DSM2_home}\study_template\historical to the tutorial + directory, after creating \{DSM2_home}\tutorials\historical. + If there is already a historical folder, just copy the contents.
    2. +
    3. Open historical_hydro.inp and historical_qual_ec.inp. Note + the CONFIGURATION sections of both reference a + file configuration_historical.inp. By containing variables + such as run dates in this file, you can more easily synchronize + the models.
    4. +
    5. Examine the common_input directory. By looking + at historical_hydro.inp, configuration_historical.inp and the + other main input files, you will see that many of the included + files for the models are in the directory ${DSM2INPUTDIR}. In + this distribution, this variable points + to /dsm2/common_input – a repository in which all the + distributed DSM2 input files are housed. Later, you may want to + copy the input files locally and repoint ${DSM2INPUTDIR} to this + local directory. In fact, there are tools to help with this. + Regardless of whether you copy them, please resist changing the + files directly – it is much easier to diagnose problems if you + make your changes in the main file (historical_hydro.inp, + historical_qual_ec.inp…) or in a new file of your own making.
    6. +
    +
  2. +
  3. Modify the Run Times in the Configuration File:
  4. +
+

In the configuration file, set the runtime definitions as follows.

+

runtime

+

START_DATE 01JUL1996
+START_TIME 0000
+QUAL_START_DATE 02JUL1996
+PTM_START_DATE ${QUAL_START_DATE}
+END_DATE 01SEP1996
+END_TIME 0000 

+
    +
  1. Note the Output Step in HYDRO:
  2. +
+

If you look in the channel output files +(e.g. output_channel_std_hydro_rki_20090715.inp), you will find that +the time step of the output is itself an ENVVAR definition called +${FINE_OUT}. This is usually defined as 15 minutes in configuration +file. Although DSM2 v8 will perform daily averages, it is recommended +that you use the finer output and aggregate as a postprocessing step (we +will cover this shortly).

+
    +
  1. Add some Output
  2. +
+

In historical_hydro.inp, add a block containing an extra flow output for +Old River at Head. Notice that the name in this case is a "practical" +name. Although you may sometimes add input with names like "ch56_0", +such a name is redundant with the other information in the line, is +difficult for non-modelers to understand and causes confusion if the +grid numbering changes.

+

OUTPUT_CHANNEL
+NAME CHAN_NO DISTANCE VARIABLE INTERVAL PERIOD_OP FILE 
+oldr_head 56 0 flow ${FINE_OUT} inst ${HYDROOUTDSSFILE} 
+END 

+
    +
  1. +

    Run HYDRO and QUAL:

    +
      +
    1. In Windows Explorer, navigate to the directory, + _\{DSM2_home}\tutorial_
    2. +
    3. Right-click on the historical directory, and select, Open + Command Window Here.
    4. +
    5. In the command window, type: hydro historical_hydro.inp
    6. +
    7. Wait for HYDRO to complete its runs.
    8. +
    9. Now type: qual historical_qual_ec.inp
    10. +
    +
  2. +
  3. +

    Aggregate the Output

    +
  4. +
+

Above we recommended that you use post-processing to aggregate your +output. Let's see how this works. At a command prompt in the +${study}/output directory, type:
+> transfer --help
+This command should give you the options for the "transfer.py" script +that will help you aggregate your output. 
+For instance, if you want to create a daily average of all your flow +output, type (this is all one line): 
+>transfer --out=postpro.dss --selection=///FLOW//// 
+--transform=period_ave --interval=1DAY historical.dss 
+As another example, you may want to take a Godin average of all the +stage output and put it in the same file: 
+>transfer --out=postpro.dss --selection=///STAGE//// 
+--transform=godin historical.dss 
+You can similarly do monthly averages by making the interval 1MON and +you can "slice" in time by specifying a time window (the syntax is given +by the help command:
+> transfer --help 

+
    +
  1. +

    Running QUAL with Volumetric fingerprinting:

    +
      +
    1. In the command window, type: qual historical_qual_vol.inp.
    2. +
    3. Open the qual echo file qual_vol_echo_historical.inp in the + output subfolder.
    4. +
    5. Open the results file in the output subfolder, and examine the + results.
    6. +
    +
  2. +
  3. +

    Running QUAL with Nonconservative Constituents fingerprinting:

    +
      +
    1. In Windows Explorer, navigate to the + directory, \{DSM2_home}\study_template_ + _historical_qual_do\ Conduct a similar study as EC and VOL.
    2. +
    3. Notice that the running time period is 1996-2000, since Stockton + effluent is not using 'constant' but detailed + timeseries: effluentflow96-2000.dss
    4. +
    +
  4. +
+

ENVVAR
+NAME VALUE 
+STOCKTON_FLOW_FILE ${TSINPUTDIR}/effluentflow96-2000.dss # needed for +DO runs, if not available use constant
+END

+
    +
  1. +
      +
    1. Open the results file in the output subfolder, and examine the + results.
    2. +
    +
  2. +
+

Particle Tracking Modeling (PTM)

+
    +
  1. +

    Run PTM in Delta Grid under Historical Condition

    +
      +
    1. +

      In Windows Explorer, navigate to the directory, _

      +

      \{DSM2_home}\tutorial\. In the command window, +type: ptm historical_ptm.inp. If necessary, reduce the running +time period by +modifying END_DATE in configuration_historical.inp. + 2. Open the ptm echo file ptm_echo_historical.inp in the output +subfolder and examine the contents. + 3. Open the ptmout.dss file in the output subfolder, and examine +the results. Do a little mass balance to see if the particle +fluxes add up.

      +
    2. +
    +
  2. +
  3. +

    Repeat with Particle Filter on Channel Turned on:

    +
  4. +
+

Set particle filter at Head of Old River 

+
    +
  1. +
      +
    1. In historical_ptm.inp, create the table for particle filter, + with constant closing operation.
    2. +
    +
  2. +
+

PARTICLE_FILTER
+NAME NODE AT_WB FILLIN FILE PATH
+Filter_HOR 8 chan:54 last constant 0
+END 

+
    +
  1. +
      +
    1. Add the related output, like
    2. +
    +
  2. +
+

PARTICLE_FLUX_OUTPUT
+NAME FROM_WB TO_WB INTERVAL FILE
+SJR-OLD chan:7 chan:54 15min ${PTMOUTPUTFILE}
+END 

+
    +
  1. +
      +
    1. Open the ptmout.dss file in the output subfolder, and examine + the results
    2. +
    +
  2. +
  3. +

    Repeat with Particle Filter on Reservoir Turned on:

    +
  4. +
+

With particle filter installed at Clifton Court Forebay (this is a +special version of filter dealing with source flows directly connecting +to reservoir) 

+
    +
  1. +
      +
    1. In historical_ptm.inp, create the table for particle filter, + with time-varying operation control, specified in DSS file.
    2. +
    +
  2. +
+

PARTICLE_RES_FILTER
+NAME RES_NAME AT_WB FILLIN FILE PATH
+clfc_div_bbid clifton_court qext:dicu_div_bbid last ./filterOp.dss +/HIST+FILTER/CLFC_DIV/FILTER_OP//IR-DECADE/DWR-BDO/
+END 

+
    +
  1. +
      +
    1. Add the related output, like
    2. +
    +
  2. +
+

PARTICLE_FLUX_OUTPUT
+NAME FROM_WB TO_WB INTERVAL FILE
+SWP-AG res:clifton_court group:bbid 15min ${PTMOUTPUTFILE} 
+END 

+
    +
  1. +
      +
    1. Open the ptmout.dss file in the output subfolder, and examine + the results
    2. +
    +
  2. +
  3. +

    Repeat with Particle Filter on Source Flow Turned on:

    +
  4. +
+

Agriculture source flow (diversions and seepages) could be required to +restrict particles from entering in simulations. It is one application +for particle filter. 

+
    +
  1. +
      +
    1. In Windows Explorer, navigate to the directory, + \{DSM2_home}\tutorial\ Open the + file delta_dicu_filter_closed.txt. Copy the content into + historical_ptm.inp
    2. +
    +
  2. +
  3. +
      +
    1. Open the ptmout.dss file in the output subfolder, and examine + the results
    2. +
    +
  4. +
+

Making animation of Particle Tracking Modeling (PTM)

+
    +
  1. Modify the PTM input file to make text output and to turn on the + dispersion parameters:
      +
    1. In Windows Explorer, copy the folder ptm_animate (with + subfolders) from \{DSM2_home}\study_templates\ptm_animate
    2. +
    +
  2. +
+

to the study directory, creating:
+\{DSM2_home}\tutorials\historical\ptm_animate

+
    +
  1. +
      +
    1. With the PTM, it is useful to be able to switch easily between + text and dss output formats – note that the animator requires + text files. The configuration_historical.inp file is + structured so that we can swap the environmental + variable PTMOUTPUTFILE. We are going to + point PTMOUTPUTFILE to txt format so we can use the animator. + 1. 1. Locate the PTMOUTPUTFILE at the end of the file, and + modify as:
    2. +
    +
  2. +
+

PTMOUTPUTFILE ptmout.txt

+
    +
  1. +
      +
    1. Open the file, historical_ptm.inp. + 1. Locate the SCALARS section. Check all of the dispersion + parameters to be t.
    2. +
    +
  2. +
+

ptm_ivert t # Use Vertical velocity profile
+ptm_itrans t # Use Transverse velocity profile
+ptm_iey t # Use transverse mixing
+ptm_iez t # Use vertical mixing

+
    +
  1. +
      +
    1. +
        +
      1. Make sure the anim_db.bin line is turned on (this is + usually commented out to save much running time)
      2. +
      +
    2. +
    +
  2. +
+

ptm anim out 15min ${DSM2OUTPUTDIR}/anim_db.bin 

+
    +
  1. Run PTM:
      +
    1. In the command window, type: ptm historical_ptm.inp.
    2. +
    3. In Windows Explorer:
        +
      1. Navigate to the directory,
      2. +
      +
    4. +
    +
  2. +
+

\{DSM2_home}\tutorials\historical\output

+
    +
  1. +
      +
    1. +
        +
      1. Examine the output in the ptmout.txt file. + 2. Copy the files, anim_db.bin and ptmout.txt. + 3. Navigate to the directory,
      2. +
      +
    2. +
    +
  2. +
+

\{DSM2_home}\tutorials\historical\ptm-animate\dual\left_panel

+
    +
  1. +
      +
    1. +
        +
      1. Paste the files in the left_panel directory.
      2. +
      +
    2. +
    +
  2. +
  3. +

    Repeat with Dispersions Parameters Turned Off:

    +
      +
    1. In Windows Explorer, navigate to the directory, + _\{DSM2_home}\tutorials\historical_
    2. +
    3. Open the file, historical_ptm.inp.
        +
      1. Locate the SCALARS section.
      2. +
      3. Change all of the dispersion parameters from t to f.
      4. +
      +
    4. +
    +
  4. +
+

ptm_ivert f # Use Vertical velocity profile
+ptm_itrans f # Use Transverse velocity profile
+ptm_iey f # Use transverse mixing
+ptm_iez f # Use vertical mixing

+
    +
  1. +
      +
    1. In the command window, type: ptm historical_ptm.inp.
    2. +
    3. In Windows Explorer:
        +
      1. Navigate to the directory,
      2. +
      +
    4. +
    +
  2. +
+

\{DSM2_home}\tutorials\historical\output

+
    +
  1. +
      +
    1. +
        +
      1. Copy the files, anim_db.bin and ptmout.txt. + 2. Navigate to the directory,
      2. +
      +
    2. +
    +
  2. +
+

\{DSM2_home}\tutorials\historical\ptm-animate\dual\right_panel

+
    +
  1. +
      +
    1. +
        +
      1. Paste the files in the right_panel directory. + 2. Navigate to the directory,
      2. +
      +
    2. +
    +
  2. +
+

\{DSM2_home}\tutorials\historical\ptm-animate

+
    +
  1. +
      +
    1. +
        +
      1. Double-click on dual.bat to open the animator. + 2. Press start to start the animator and use the controls to + adjust the speed.
      2. +
      +
    2. +
    +
  2. +
  3. +

    Modifying the Animator Display:

    +
      +
    1. The left_panel and right_panel directories contain files + needed for operation:
        +
      1. Modify the data path names: fluxInfoDB.data stores file + and path information for the PTM output (the flux output in + the text file is labeled with DSS-like path names). The + listings in this file will be turned into the small flux bar + graphs you see in the animator. The integer you see above + the file name is an internal node ID, which is how you + assign locations in the animator (also + see network.dat below). Also, an output file of the PTM + version 8 contains a minor version number. So the user may + need to modify the data path names in + the fluxInfoDB.data according to corresponding path names + in an output file, ptmout.txt in this example.
      2. +
      3. labelsDB.data stores label information. You list labels + and their location (using nodes, see network.dat below)
      4. +
      5. network.dat stores internal x- and y-locations for + nodes and channels. Pseudo-nodes are used for labels and + other annotations as noted above. Please note that the nodes + that are used in network.dat are internal node numbers, + not external. (This makes the file very hard to edit, a + point that will probably be addressed in the future). If you + want a mapping of external-to-internal numbers, look at your + echoed hydro output file (*.out or *.hof).
      6. +
      +
    2. +
    3. Examine these files and the labels in them. Change the labels to + something creative and reopen the animator.
    4. +
    +
  4. +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/DSM2_Bay-Delta_Tutorial_2_Source_Tracking_Fingerprinting_/index.html b/tutorials/DSM2_Bay-Delta_Tutorial_2_Source_Tracking_Fingerprinting_/index.html new file mode 100644 index 00000000..bf67ae0d --- /dev/null +++ b/tutorials/DSM2_Bay-Delta_Tutorial_2_Source_Tracking_Fingerprinting_/index.html @@ -0,0 +1,576 @@ + + + + + + + + + + + + + + + + + + DSM2 Bay-Delta Tutorial 2: Source Tracking (Fingerprinting) - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + +

DSM2 Bay-Delta Tutorial 2: Source Tracking (Fingerprinting)

+

Purpose: The purpose of this tutorial is to use the source tracking capabilities of the model to create a fingerprinting study. We will set up both volumetric and concentration-based fingerprinting and visualize the results.

+
    +
  1. +

    Reopen the historical tutorial

    +
      +
    1. In windows, navigate to \{DSM2_home}\tutorial\historical. + (folders and files are copied as described in the Delta tutorial + 1)
    2. +
    +
  2. +
  3. +

    Create a model for source tracking:

    +
  4. +
+

In the background, source tracking imposes a computational cost on QUAL +that is the same as one additional constituent per source. For this +reason, it is useful to comment out source tracking as a standard course +of running DSM2. But when you desire source tracking, you can uncomment +it as follows:

+
    +
  1. +
      +
    1. In historical_qual_ec.inp, locate the GROUPS include section.
    2. +
    3. Uncomment the group definitions for source tracking (delete the + # sign at the start of the line). You may wish to review the + referenced file to see how the groups are identified.
    4. +
    5. Similarly uncomment the two fingerprinting files – the ones that + have "source_track" in their names.
    6. +
    +
  2. +
  3. +

    Define volumetric inputs

    +
      +
    1. Create the QUAL volumetric input file. Copy + historical_qual_ec.inp and rename as + historical_qual_vol.inp.
    2. +
    3. Modify the concentration blocks. Go through each of the node and + reservoir concentration files for QUAL ec. Modify the + constituent (variable) to unit, value (FILE) to constant, + (PATH) to 100. This step is conceptually simple, but will + produce a large file – feel free to break it into several files + if you prefer. If you are using Notepad++, you may want to use + its column delete/copying features (press alt while you make + your selection).
    4. +
    5. Compare what you produced to the existing files in common_input + that have "volumetric" in their names (node and reservoir + concentration). Are they the same input? How could you test this + using the echoed output?
    6. +
    +
  4. +
  5. +

    Define the fingerprinting output

    +
      +
    1. Specify Clifton Court concentration output for each of the + source groups defined in the previous step, for both + constituents: ec and unit, in block + OUTPUT_RESERVOIR_SOURCE_TRACK. The name should be clifton_court, + the concentration (variable) should be ec or volume and the + interval should be 1day. Avoid redundancy or use of the source + in the output name: i.e. use "clifton_court" for the name, not + "clifton_ag" or "clifton_ec", because the source information is + recorded in the F part of the output DSS file.
    2. +
    3. Similar specification could be defined for channel source track + in block OUTPUT_CHANNEL_SOURCE_TRACK. Pick any channel you are + interested and do the definition.
    4. +
    +
  6. +
  7. +

    Run HYDRO and QUAL for One Year

    +
      +
    1. Using historical_hydro.inp, historical_qual_ec.inp, + historical_qual_vol.inp as the launch files, run HYDRO and + QUAL for one year in 2002. Start QUAL a day later to avoid mass + conservation errors in the first hour. Make sure the init_conc + variable (in SCALAR block) is set to zero so that there will be + no initial condition contribution for any variables (note: for a + volumetric fingerprint, it may be useful to make this + concentration 100 if you want to include initial conditions in + the fingerprint analysis).
    2. +
    3. Open the output file (historical.dss), and examine the + results.
    4. +
    +
  8. +
  9. +

    Process the output

    +
      +
    1. Use VISTA or HEC-DSSVUE to open up the output file. Copy + May-September concentrations source track of Clifton Court for + each location. Paste the output into a new sheet in the Excel + provided called excel_fingerprint.xls, which you can use as a + reference. Use the "stacked area plot" in Excel (one of the + standard Excel plot types) to plot up the fingerprint results.
    2. +
    +
  10. +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/DSM2_Bay-Delta_Tutorial_3_Planning_Simulation/index.html b/tutorials/DSM2_Bay-Delta_Tutorial_3_Planning_Simulation/index.html new file mode 100644 index 00000000..1dda2ba1 --- /dev/null +++ b/tutorials/DSM2_Bay-Delta_Tutorial_3_Planning_Simulation/index.html @@ -0,0 +1,592 @@ + + + + + + + + + + + + + + + + + + DSM2 Bay-Delta Tutorial 3: Planning Simulation - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + +

DSM2 Bay-Delta Tutorial 3: Planning Simulation

+

Purpose: The goal of this tutorial is to learn to preprocess and launch a Bay-Delta planning simulation using CalSim Output as the basis for flow inputs.

+

The CalSim study we will use is the ocap_2005A01A_EWA2_71_novamp_DV.dss provided in the tutorials/data directory. We will prepare and launch the run using both temporary barriers and permanent barriers configurations (SDIP: South Delta Improvements Program).

+

Preparation

+

We will begin by creating a study space to house the planning study.

+
    +
  1. +

    Copy the study template:

    +
      +
    1. In windows, navigate to \{DSM2_home}\study_templates. Copy + the ocap_sdip template to \{DSM2_home}\tutorial\ocap_sdip. + Copy the ocap_temp_barrier template to + \{DSM2_home}\tutorial\ocap_temp_barrier
    2. +
    3. In each new study folder, create a directory called "output" if + there is not such a folder there already.
    4. +
    5. Copy the file ocap_2005A01A_EWA2_71_novamp_DV.dss from + \{DSM2_home}\timeseries to + \{DSM2_home}\tutorial\data\calsim. Note that we just put this + file in timeseries as a sample – in practice CalSim output will + be exterior to the DSM2 distribution (or will be in the study + folder).
    6. +
    +
  2. +
  3. +

    Preprocess for sdip and temp_barriers:

    +
      +
    1. Navigate to the ocap_sdip study directory and open + config_sdip_ocap_71.inp.
    2. +
    3. Make sure that the run dates are set to the full 1974-1991 + sixteen year planning period. It is a good idea to preprocess + the full period even if you want to run a subset of these dates.
    4. +
    5. Set the DSM2MODIFIER to ocap_sdip_tutorial.
    6. +
    7. Make sure that the DICU version in the configuration file is + 2005, representing a 2005 level of development.
    8. +
    9. Makes sure the STAGE_VERSION in the configuration file is + PLANNING-2-SL.  
    10. +
    11. Make sure the configuration file is pointing to the right data, + which means using the right directory, file and DSS path to find + the CalSim results. In this case, set:
        +
      1. CALSIMNAME to ocap_2005A01A_EWA2_71_novamp_DV (CalSim output + file without the ".dss" extension)
      2. +
      3. CALSIMSTUDY_ORIGINAL to 2005A01A
      4. +
      5. ~~ CALSIMDIR to ../data/calsim ~~
      6. +
      +
    12. +
    13. Save your data
    14. +
    15. Launch the preprocessing system. Obtain a command prompt and + type:
    16. +
    +
  4. +
+

> prepro config_sdip_ocap_71.inp

+
    +
  1. +
      +
    1. Repeat the steps above for the temporary barriers directory and + the configuration file config_ocap_temp_barriers.inp. Make + sure that the dates span the full 1974-1991 period and repeat + the checks (d) and (e) for the temporary barrier configuration + file.
    2. +
    3. Set the DSM2MODIFIER to ocap_temp_barrier_tutorial.
    4. +
    5. Launch the preprocessor with the command:
    6. +
    +
  2. +
+

> prepro config_ocap_temp_barriers.inp

+
    +
  1. Run DSM2:
      +
    1. In Windows Explorer, navigate to the directory, + \{DSM2_home}\tutorial\ocap_sdip
    2. +
    3. Open the launch files hydro.inp and qual_ec.inp.
    4. +
    5. Set the dates to a shorter period, 1974-1976, so that the run + will take reasonable time for the tutorial. Note that we always + preprocess the full period even when we attempt to shorten the + run.
    6. +
    7. Run the sdip simulation, for hydro and qual by typing:
    8. +
    +
  2. +
+

> hydro hydro.inp,~~ocap_sdip_tutorial.dss ~~ +> qual qual_ec.inp

+
    +
  1. +
      +
    1. Uncomment and Repeat these steps (a-c) and run hydro and qual + for the temporary barrier simulation.
    2. +
    +
  2. +
  3. +

    Examine the output:

    +
      +
    1. The temporary barriers and permanent barriers protect water + levels in the South Delta in very different ways. Compare the + output at ROLD059, Old River at Tracy Blvd for your two runs to + see the differences.
    2. +
    +
  4. +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/index.html b/tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/index.html new file mode 100644 index 00000000..f4886a27 --- /dev/null +++ b/tutorials/DSM2_Bay-Delta_Tutorial_4_Batch_Preprocessing/index.html @@ -0,0 +1,773 @@ + + + + + + + + + + + + + + + + + + DSM2 Bay-Delta Tutorial 4: Batch Preprocessing - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + +

DSM2 Bay-Delta Tutorial 4: Batch Preprocessing

+

DSM2 Bay-Delta Tutorial 4: Batch Preprocessing - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+

CalSim Files: A typical situation with planning studies is that the input scenarios are represented by different CalSim output files. Sometimes these files reside in a directory structure that follows a pattern, for instance the first two alternatives might look like this:

+

C:/calsim

+

/altname1
+/dss
+/d1641
+2020d09edvsa.dss
+/altname2
+/dss
+/d1641
+2020d09edvsa.dss
+Note that this scheme CalSim uses directory structure to differentiate +its output – the files and pathnames are identical.
+Another system you may encounter is one where the CalSim files +themselves are named after the scenario:

+

C:/calsim

+

/altname1_2020d09edvsa.dss
+/altname2_2020d09edvsa.dss

+

Preprocessor requirements:

+

The DSM2 preprocessing scheme requires three pieces of information for +each scenario:

+
    +
  1. The DSM2 name we want to give the scenario (will become + DSM2MODIFIER).
  2. +
  3. The directory in which the CalSim output is found (will become + CALSIMDIR)
  4. +
  5. The name of the CalSim file (minus the .dss part – will become + CALSIMNAME)
  6. +
+

So for the first example above
+DSM2MODIFIER=altname1
+CALSIMNAME=2020d09edvsa
+CALSIMDIR= c:/calsim/altname1/dss/d1641
+How you will get this information into the preprocessing system depends +on approach. We will look at two, but if you are an experienced script +writer you will immediately see lots of possibilities.

+

Two approaches for batch jobs:

+

For larger studies, you have some choices as to how to set things up. +We'll look at a few that may help you get started, while experienced +script writers are likely to come up with numerous interesting +variations. These exercises will guide you in setting up modest batch +processing and familiarize you a bit more with the concept of +environmental variables at the command line and in windows "batch" +scripts (files with a *.bat extension that list commands for the +command line).

+
    +
  1. You can create configuration files for each alternative, e.g. + config_alt1.inp, config_alt2.inp. In each configuration file you + hard-wire the information that is required for that + scenario. This method leaves a record of each scenario for people who inherit + your study. It is a good choice when the number of alternatives is + small. It is also a good choice when things other than CalSim vary + between alternatives.
  2. +
  3. Alternatively, you can create a single configuration file that + points the three scenario-related variables to generic values. Then + you use a batch_prepro.bat script to loop through the scenarios. + When the number of simulations is very large (say 100 climate change + scenarios) and the only difference in the inputs is CalSim, this + method is efficient.
  4. +
+

Now let's go through the exercises and check out the details.

+

Method 1: Using separate configuration files:

+
    +
  1. Create the configuration files:
      +
    1. In windows, navigate to \{DSM2_home}\tutorial\ocap_sdip. The + alternatives we are using have generic sounding names, but they + are compatible with OCAP assumptions.
    2. +
    3. Copy the configuration file config_sdip_ocap_71.inp to + config_alt1.inp
    4. +
    5. Make sure the study dates cover the full 1974-1991 period for + planning runs. It is usually a good idea to preprocess the whole + period, even if you are going to do run dsm2 on a subset of the + simulation period.
    6. +
    7. Replace the three variables indicated below. The three lines may + not be next to one another.
    8. +
    +
  2. +
+

\<file config_alt1.inp>
+ENVVAR
+NAME VALUE
+[other definitions…] # NOTE: LINES SHOWN MAY NOT

+
    +
  • +
      +
    1. BE TOGETHER*
      + CALSIMNAME 2005a01edv # File name, minus .dss
      + DSM2MODIFIER alt1 # DSM2 name for alternative
      + CALSIMDIR ../data/calsim/alt1 # CalSim output directory
      + END
    2. +
    +
  • +
  • +
      +
    1. Copy the file config_alt1.inp to config_alt2.inp. Repeat + step (d) using alt2 as the DSM2MODIFIER.
    2. +
    3. Prepare hydro.inp and qual.inp to handle a generic + configuration file by making the name of the configuration file + at the top of each an ENVVAR. We will be providing this from the + command line or batch file – as an operating system + environmental variable.
    4. +
    +
  • +
+

\<file hydro.inp>
+CONFIGURATION
+${CONFIGFILE} # Changed
+END
+… [other data]

+
    +
  1. +
      +
    1. Prepare a batch file for preprocessing. It will have one line + per alternative. Notice the "call" statement – this is the best + way to call a succession of other batch files (prepro is itself + a batch file called prepro.bat).
    2. +
    +
  2. +
+

\<file study_prepro.bat>
+call prepro config_alt1.inp
+call prepro config_alt2.inp

+
    +
  1. +
      +
    1. At the command prompt, launch the preprocessing by typing:
    2. +
    +
  2. +
+

> study_prepro.bat

+
    +
  1. +
      +
    1. Now create a batch file that launches QUAL and HYDRO for every + alternative in the study. For each alternative, you must set the + environment variable CONFIGFILE, then launch the models.
    2. +
    +
  2. +
+

\<file study.bat>
+SET CONFIGFILE=config_alt1.inp
+hydro hydro.inp
+qual qual_ec.inp
+SET CONFIGFILE=config_alt2.inp
+hydro hydro.inp
+qual qual_ec.inp

+
    +
  1. +
      +
    1. Launch the study batch file by typing at the command prompt:
    2. +
    +
  2. +
+

> study.bat

+

Method 2: Batch file that loops

+
    +
  1. *Create a generic configuration file:*
      +
    1. In the looping method, we are going to describe the alternatives + in a text file and loop through the text file. First we need a + configuration file that is generic. Let's begin by copying + config_sdip_ocap_71.inp one more time to a file called + config_study.inp. Change the 3 variables (DSM2MODIFIER, + CALSIMNAME and CALSIMDIR) as follows.
    2. +
    +
  2. +
+

\<file config_study.inp>
+ENVVAR
+NAME VALUE
+[other definitions…]
+CALSIMNAME ${BATCH_CALSIMNAME} # File name, minus .
+DSM2MODIFIER ${BATCH_DSM2MODIFIER}
+CALSIMDIR ${BATCH_CALSIMDIR} # CalSim output directory
+dss

+
    +
  1. +

    DSM2 name for alternative
    +[other definitions…]
    +END

    +
  2. +
  3. +

    Create the scenarios.txt file

    +
      +
    1. In the study folder, create a file called scenarios.txt
    2. +
    3. On each line of the file, put the scenario name (DSM2MODIFIER), + directory (CALSIMDIR) and file name (CALSIMNAME) minus the + ".dss" extension.
    4. +
    +
  4. +
+

\<file scenarios.txt>
+alt1,../data/calsim/alt1,2005a01edv
+alt2,../data/calsim/alt2,2005a01edv

+
    +
  1. Launch batch_prepro.bat
      +
    1. In the study directory, obtain a command prompt and type:
    2. +
    +
  2. +
+

> batch_prepro config_study.inp scenarios.txt

+
    +
  1. +
      +
    1. Note: if the batch_prepro script fails for a particular scenario + after running others successfully, first fix the problem and + eliminate the failed (half-processed) scenario. Then avoid + re-running the successful scenarios by adding the "resume" tag, + for example:
    2. +
    +
  2. +
+

> batch_prepro config_study.inp scenarios.txt resume
+If you type this command now, batch_prepro.bat will harmlessly do +nothing.

+
    +
  1. Examine and use the preprocessing products
      +
    1. The preprocessing product is a HEC-DSS file for each scenario in + the local time series directory. You should have one file per + scenario.
    2. +
    3. If you are doing this tutorial on your own, you may choose to + launch dsm2 on each alternative. To do this, change the + configuration file in hydro.inp and qual_ec.inp to the + generic one:
    4. +
    +
  2. +
+

\<file hydro.inp>
+CONFIGURATION
+config_study.inp
+END

+
    +
  1. +
      +
    1. Use batch_run.bat with the same syntax as you did for + batch_prepro:
    2. +
    +
  2. +
+

> batch_run config_study.inp
+Note that you may need to modify this script if you use it for something +other than qual_ec. We may not be able to run the simulations in class +because of the time required – but if you have extra time, change the +dates to a one year (1991) and try it out.

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/index.html b/tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/index.html new file mode 100644 index 00000000..3934f724 --- /dev/null +++ b/tutorials/DSM2_Bay-Delta_Tutorial_5_Suisun_Marsh_Operating_Rules/index.html @@ -0,0 +1,841 @@ + + + + + + + + + + + + + + + + + + DSM2 Bay-Delta Tutorial 5: Suisun Marsh Operating Rules - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 Bay-Delta Tutorial 5: Suisun Marsh Operating Rules

+

Purpose: The objective of this tutorial is to learn about the Suisun Marsh Salinity Control Gate and practice tuning an operating rule.

+

Background:

+

The Suisun Marsh Salinity Control Gates (SMSCG) were completed and began operating in October 1988. The first year of operation was used to test the gates, and official operation began in November 1989. The facility consists of a boat lock, a series of three radial gates, and flashboards. The SMSCG control salinity by restricting the flow of higher salinity water from Grizzly Bay into Montezuma Slough during incoming tides and retaining lower salinity Sacramento River water from the previous ebb tide. Operation of the SMSCG in this fashion lowers salinity in Suisun Marsh channels and results in a net movement of water from east to west. When Delta outflow is low to moderate and the SMSCG are not operating, net movement of water is from west to east, resulting in higher salinity water in Montezuma Slough.

+

The SMSCG usually begin operating in early October and, depending on salinity conditions, may continue operating through the end of the control season in May. When the channel water salinity decreases sufficiently below the salinity standards, or at the end of the control season, the flashboards are removed and the SMSCG raised to allow unrestricted movement through Montezuma Slough. Details of annual SMSCG operations can be found in Summary of Salinity Conditions in Suisun Marsh During Water Years 1984–1992 (DWR 1994b), or the Suisun Marsh Monitoring Program Data Summary produced annually by DWR's Environmental Services Office.

+

The tidal operation of the gate should open the gate when a water level drop of 0.3 ft exists across the gate (upstream to downstream) and to close the gate when velocity is less than 0.1 (impending flood tide). The boat lock is held open whenever the radial gates are operated tidally. The flashboard is typically in place when the gates are operated and removed when the gate is fully open. Note that in the historical record these relationships do not always hold – there have been numerous operating experiments.

+

We will use Martinez EC as a surrogate to determine when EC compliance is an issue and the gates need to be operated tidally.

+

A simplified version of the Marsh standards is given below. The units are millisiemens per centimeter (mS/cm), which are a thousand times the microsiemens per centimeter used in DSM2 modeling. There is a water year dependence in the full set of standards – the levels given in the table apply to 1974-1976 but not to 1977 which is a "deficiency year". In this tutorial, we will consider only the site S-42, Suisun Slough @ Volanti Slough. The RKI for this location is SLSUS012, and the location is channel 494 distance 4681ft.

+ +++++++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+

 

+


+


+

D-1641 STANDARD


+


+


+


+


+


+

OCT

NOV

DEC

JAN

FEB-MAR

APR-MAY


+


+

Eastern


+


+


+


+


+


+


+


+

C-2

19.0

15.5

15.5

12.5

8.0

11.0


+


+

S-64

19.0

15.5

15.5

12.5

8.0

11.0


+


+

S-49

19.0

15.5

15.5

12.5

8.0

11.0


+


+

Western


+


+


+


+


+


+


+


+

S-42

19.0

16.5

15.5

12.5

8.0

11.0


+


+

S-21

19.0

16.5

15.5

12.5

8.0

11.0


+


+

S-35

N/A*

N/A*

N/A*

N/A*

N/A*

N/A*


+


+

S-97

N/A*

N/A*

N/A*

N/A*

N/A*

N/A*


+


+


+


+


+


+


+


+


+


+


+

*In a good faith effort, DWR will
+consider S35 and S97
+monitoring stations

*In a good faith effort, DWR will consider +S35 and S97 monitoring stations


+


+


+


+


+


+

when deciding gate operations.

when deciding gate operations.


+


+


+


+


+


+


+

+ +

+

Steps:

+
    +
  1. Copy the study and configuration files:
      +
    1. In windows, navigate to + \{DSM2_home}\study_templates\ocap_sdip.
    2. +
    3. Copy the planning study to the \{DSM2_home}\tutorials + directory.
    4. +
    5. Rename config_sdip_ocap_71.inp as config_suisun.inp.
    6. +
    7. In the configuration file, make sure the study dates cover the + full 1974-1991 period for planning runs. It is usually a good + idea to preprocess the full period of the inputs, even if you + are going to do run dsm2 on a subset of the simulation period.
    8. +
    9. Set DSM2MODIFIER to suisun.
    10. +
    11. Run prepro on the file config_suisun.inp:
    12. +
    +
  2. +
+

> prepro config_suisun.inp

+
    +
  1. Examine and correct the Suisun Marsh operating rule.
  2. +
+

The Montezuma Slough velocity close rule in +oprule_montezuma_planning_gate.inp is based on flow (note this file +name will have a version date appended to it). The rule requires +correction to be based on velocity.

+
    +
  1. +
      +
    1. Add a file representing a "correction layer" to the operating + rules called oprule_revised_montezuma.inp.
    2. +
    3. Correct the velclose part of the rule to be based on channel + velocity. You can look up the correct variable name in the + Operating Rule Guide in the html help system.
    4. +
    5. Note the Martinez EC path used in the operating rule to + determine whether the gate needs to be operated tidally. Open + the suisun.dss input file and tidally or daily average this + path. Then substitute the tidally averaged version of EC in the + operating rule by overriding the time series definition in the + Operation Time Series table.
    6. +
    7. Note that the threshold for operating the gate is in the + configuration file: MSCS_EC_THRESH 20000
    8. +
    +
  2. +
  3. +

    Run DSM2:

    +
      +
    1. In the configuration file, set the run dates to 1974 – 1977.
    2. +
    3. Add the output you will need to examine the S42 site using the + information given in the introduction and the techniques you + have learned from the other tutorials.
    4. +
    5. Point the CONFIGURATION include file in hydro.inp to + config_suisun.inp.
    6. +
    +
  4. +
+

d. Launch HYDRO with the command:
+>hydro hydro.inp

+
    +
  1. Examine the output.
      +
    1. Compare EC output to the standard presented in the introduction. + Is the gate over-operating or under-operating?
    2. +
    3. How can you further enhance the operating rule? Discuss the + boatlock and flashboards.
    4. +
    +
  2. +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/DSM2_Bay-Delta_Tutorial_7_Clifton_Court_Diurnal_Pumping/index.html b/tutorials/DSM2_Bay-Delta_Tutorial_7_Clifton_Court_Diurnal_Pumping/index.html new file mode 100644 index 00000000..e6647ffd --- /dev/null +++ b/tutorials/DSM2_Bay-Delta_Tutorial_7_Clifton_Court_Diurnal_Pumping/index.html @@ -0,0 +1,734 @@ + + + + + + + + + + + + + + + + + + DSM2 Bay-Delta Tutorial 7: Clifton Court Diurnal Pumping - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + +

DSM2 Bay-Delta Tutorial 7: Clifton Court Diurnal Pumping

+

Purpose: The goal of this tutorial is to learn how to implement a diurnal pumping quota for Banks pumping (State Water Project). In the process you will learn how to track totals using the ACCUMULATE function.

+

Background: The Banks pumping facility is often operated on a diurnal schedule, emphasizing pumping during off-peak electricity hours. An example of summer electricity prices for the year 2005 is shown in Figure 1:

+


+Figure 1: Example wholesale electricity prices in July 2005 (CWEMF, KT +Shum)

+

An idealized schedule from the point of view of electricity would be to pump the maximum possible amount late at night until the daily pumping needs are satisfied. Actual hourly variations in pumping are shown in Figure 2. Numerous other factors (e.g. ensuring minimum stage requirements in the Forebay) can affect instantaneous maximum pumping, which is why we might consider an operating rule instead of a simple time series to model diurnal pumping.

+

In this tutorial we will emulate the ideal pumping schedule by tracking +the amount pumped since midnight and quitting once we have pumped a +total that satisfies the daily average requested by CALSIM. We will use +the ACCUMULATE function to track the total. In a later step we will +attenuate pumping to avoid drawing Clifton Court Forebay below -2ft +NGVD.
+
+Figure 2: Diurnal Variation in Pumping, July-August 2004 (CWEMF, KT +Shum)

+

The planning study we will use for this tutorial is ocap_sdip *provided in the study_templates directory. The choice between temporary and permanent barriers is not central to the material, though the SDIP project did propose higher pumping.*

+

Preparation

+

We will begin by creating a study space to house the planning study.

+
    +
  1. +

    Copy the study template:

    +
      +
    1. In windows, navigate to \{DSM2_home}\study_templates. Copy + and rename the ocap_sdip template to + \{DSM2_home}\tutorial\ocap_sdip_diurnal_swp.
    2. +
    3. If you have not already done so for a previous tutorial, copy + the file ocap_2005A01A_EWA2_71_novamp_DV.dss (CALSIM output + file used for planning runs) from \{DSM2_home}\timeseries to + \{DSM2_home}\tutorial\data\calsim. Note that we just put this + file in timeseries as a sample – in practice CalSim output will + be exterior to the DSM2 distribution (or should go in the study + folder).
    4. +
    +
  2. +
  3. +

    Preprocess for sdip barriers:

    +
      +
    1. Rename config_sdip_ocap.inp to + config_sdip_ocap_diurnal_ccfb.inp and open the file.
    2. +
    3. Make sure that the run dates are set to the full 1974-1991 + (01OCT1974 0000 – 01OCT1991 0000) sixteen year planning period. + It is a good idea to preprocess the full period even if you want + to run a subset of these dates.
    4. +
    5. Set the DSM2MODIFIER to diurnal_pumping.
    6. +
    7. Make sure that the DICU version in the configuration file is + 2005, representing a future (2005) level of development.
    8. +
    9. Make sure the STAGE_VERSION in the configuration file is + PLANNING-2-SL.
    10. +
    11. Make sure the configuration file is pointing to the right + directory, file and DSS path to find the CalSim results. In this + case, set:
        +
      1. CALSIMNAME to ocap_2005A01A_EWA2_71_novamp_DV (CalSim output + file without the ".dss" extension)
      2. +
      3. CALSIMSTUDY_ORIGINAL to 2005A01A
      4. +
      5. CALSIMDIR to ../data/calsim
      6. +
      +
    12. +
    13. Save your data
    14. +
    15. Launch the preprocessing system. Obtain a command prompt and + type:
    16. +
    +
  4. +
+

> prepro config_sdip_ocap_diurnal_ccfb.inp

+
    +
  1. +

    Add output for Clifton Court Forebay:

    +
      +
    1. In hydro.inp, add output that will allow you to more directly + track the operations. Create an OUTPUT_RESERVOIR table. Create a + 15min instantaneous output request with clfct or + clifton_court as the name, clifton_court as the reservoir, + none as the connecting node and flow-source as the variable. + The flow-source output will give the total source and sink + inflow to Clifton Court – it will differ from SWP pumping only + by a small amount (due to Byron-Bethany Irrigation District).
    2. +
    +
  2. +
  3. +

    Run DSM2:

    +
      +
    1. In the configuration file, set the dates 01JAN1975 to 25JAN1975 + so that the run will take a short time. These dates will + generate the features we want for the tutorial, including a + period of low stage at Clifton Court Forebay under diurnal + operation. Note that we always preprocess the full period even + when we shorten the run.
    2. +
    3. Open hydro.inp file and change the included configuration file + to config_sdip_ocap_diurnal_ccfb.inp and save it.
    4. +
    5. Run the sdip simulation for HYDRO by typing:
    6. +
    +
  4. +
+

> hydro hydro.inp

+
    +
  1. Examine the output:
  2. +
+

Once you have run HYDRO, open the output file and look at the flow-source
+output for Clifton Court. This variable represents exports out of
+Clifton Court Forebay, which are dominated by State Water Project
+pumping.

+

Diurnal Operating Rule

+

1. Create the diurnal rule with no Forebay stage protection:

+
    +
  1. +
      +
    1. Create a file called oprule_diurnal_swp.inp. Create empty + OPERATING_RULE and OPRULE_EXPRESSION tables. Alternatively, do + this by copying, renaming and clearing the contents of another + operating rule input file.
    2. +
    3. Create an expression to accumulate daily State Water Project + (SWP) pumping since midnight:
        +
      1. Name: daily_total_swp
      2. +
      3. Definition: "ACCUMULATE(ext_flow(name=swp)*DT,0.0,HOUR==0)"
      4. +
      +
    4. +
    +
  2. +
+

This reads "accumulate swp, starting at zero, resetting when the hour of +the day is zero". We multiply by DT to get a volume (which makes the +rule time step independent and allows comparison to a daily target). The +time series reference comes from elsewhere in the input and is the daily +average pumping rate. It is perfectly acceptable to use time series that +are defined elsewhere in the DSM2 input without redefining it in the +OPRULE_TIME_SERIES table – the latter is just there to allow you to +define any additional time series you might need.

+
    +
  1. +
      +
    1. Create an expression to quantify the daily target. Note that we + are multiplying an average daily flow in cubic feet per second + by the number of seconds in the day to obtain a volume. + 1. Name: daily_target_swp + 2. Definition: ts(name=swp)*(60*60*24)
    2. +
    3. Create an expression that defines maximum physical SWP pumping + as a magnitude:
        +
      1. Name: max_swp_pumping
      2. +
      3. Definition: 9000.0
      4. +
      +
    4. +
    5. Now, in the OPERATING_RULE table create a rule that pumps the + maximum until the daily total is reached:
        +
      1. Name: swp_diurnal
      2. +
      3. Action: "SET ext_flow(name=swp) TO + IFELSE(abs(daily_total_swp) > abs(daily_target_swp), 0.0, + -max_swp_pumping)". Note the quotes and the minus sign: SWP + is really a sink, not a source.
      4. +
      5. Trigger: Use STARTUP or TRUE for the trigger (the two do the + same thing, and trigger exactly once at the beginning of the + run). The rule will be in use unless it is displaced by + another operating rule.
      6. +
      +
    6. +
    7. In hydro.inp, add the new oprule_diurnal_swp.inp file at the + bottom of the OPERATIONS include block.
    8. +
    9. Run HYDRO on the simulation. Examine the output for HYDRO, + including Clifton Court reservoir water levels, flow through the + gates to node 72 and the "flow-source" output for the reservoir + (which will differ from SWP pumping by a small amount due to + Byron-Bethany Irrigation District). Are you getting the + fully-on-fully-off pumping pattern you expect? Could the same + schedule be prepared off-line in advance using a 15-min time + series for SWP pumping? Does Clifton Court water surface go + below the "warning" level of -2.0ft NGVD needed to maintain flow + in the fish facilities?
    10. +
    11. Now create an expression identifying a low stage condition:
        +
      1. Name: ccfb_stage_low
      2. +
      3. Definition: res_stage(res=clifton_court) \< -2.0
      4. +
      +
    12. +
    13. Change the trigger for swp_diurnal to "NOT ccfb_stage_low", + including the quotes.
    14. +
    15. Create an expression that describes inflow into Clifton Court + from the outside channel:
        +
      1. Name: ccfb_inflow
      2. +
      3. Definition: res_flow(res=clifton_court,node=72)
      4. +
      +
    16. +
    17. Create a new operating rule that covers the critical case:
        +
      1. Name: swp_low_stage
      2. +
      3. Action:
      4. +
      +
    18. +
    +
  2. +
+

"SET ext_flow(name=swp) TO
+-min2(abs(ccfb_inflow),max_swp_pumping)"
+This rule sets exports equal to the inflow to Clifton Court, which +allows some pumping to continue as long as it does not further draw down +Clifton Court. A simple alternative would be just to set exports to +zero.

+
    +
  1. +
      +
    1. +
        +
      1. Trigger: ccfb_stage_low
      2. +
      +
    2. +
    +
  2. +
+

Note the minus sign, again because SWP exports are a sink rather than a +source. The absolute sign is there to make sure the minimum function is +not operating on any big transient negative flows.

+
    +
  1. +
      +
    1. Rerun HYDRO. Are you getting the results you expected? Does + Clifton Court stage go below -2.0? Are you still pumping + according to the expected pattern? Could you implement this + policy with a time series controlling SWP instead of an + operating rule?
    2. +
    +
  2. +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/DSM2_Tutorial_Overview/index.html b/tutorials/DSM2_Tutorial_Overview/index.html new file mode 100644 index 00000000..1a2da765 --- /dev/null +++ b/tutorials/DSM2_Tutorial_Overview/index.html @@ -0,0 +1,632 @@ + + + + + + + + + + + + + + + + + + DSM2 Tutorial Overview - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

DSM2 Tutorial Overview

+

DSM2 Overview

+

DSM2 Modules

+
    +
  • +

    HYDRO

    +
  • +
  • +

    QUAL

    +
  • +
  • +

    *PTM*The Delta Simulation Model II (DSM2) is a one-dimensional + mathematical model for dynamic simulation of one-dimensional + hydrodynamics, water quality and particle tracking in a network of + riverine or estuarine channels. DSM2 can calculate stages, flows, + velocities, mass transport processes for conservative and + non-conservative constituents including salts, water temperature, + dissolved oxygen, and trihalomethane formation potential, and + transport of individual particles. DSM2 thus provides a powerful + simulation package for analysis of complex hydrodynamic, water + quality, and ecological conditions in riverine and estuarine + systems.
    + DSM2 currently consists of three modules: HYDRO, QUAL, and PTM. The + relationship between HYDRO, QUAL and PTM is shown in Figure 1. HYDRO + simulates one-dimensional hydrodynamics including flows, velocities, + depth, and water surface elevations. HYDRO provides the flow input + for QUAL and PTM. QUAL simulates one-dimensional fate and transport + of conservative and non-conservative water quality constituents + given a flow field simulated by HYDRO. PTM simulates pseudo 3-D + transport of neutrally buoyant particles based on the flow field + simulated by HYDRO. PTM has multiple applications ranging from + visualization of flow patterns to simulation of discrete organisms + such as fish eggs and larvae. A fourth module for sediment transport + is currently being developed.

    +

    HYDRO
    +1-D flow, velocity, depth, and water surface elevations*QUAL*
    +1-D fate and transport of conservative and non-conservative +constituents*PTM*
    +Pseudo 3-D transport of neutrally buoyant particles
    +Figure 1: Schematic of DSM2 Modules

    +

    Forecast

    +

    *Future Conditions*DSM2 Study Types
    +DSM2
    +Study Types

    +
  • +
  • +

    Historical

    +
  • +
  • +

    Forecasting

    +
  • +
  • +

    *Planning*DSM2 is usually used for three kinds of Delta + simulations: historic conditions, forecasting future conditions + (real-time), and planning studies (Figure 2 and Table 1). Each type + of DSM2 study is briefly described below.
    + *Recreate Historic Conditions*Historical simulations replicate + past operations, hydrologic conditions, water quality and Delta + configurations. These historical simulations enable calibration and + validation of the model by comparison of simulation results and + field data. Historical simulations also augment available field data + to provide a more spatially and temporally complete representation + of the hydrodynamic and water quality conditions for that time + period.
    + Forecasting simulations, also known as real-time simulations, use + recent field data and forecast data to project Delta conditions into + the near future (typically one to ten weeks). Recently collected + historical data provide current conditions for the Delta. Recent + tidal elevations at Martinez are used with an astronomical tide + forecast to project the Martinez tide into the near future. + Corresponding hydrodynamic and water quality conditions in the Delta + are then simulated. Forecasting simulations can assist State Water + Project operations decisions.
    + *Planning Studies of Hypothetical Conditions*Delta planning + studies evaluate how hypothetical changes to factors such as + hydrologic regimes, water quality standards, system operations, and + Delta configurations may impact Delta conditions. To explore the + impacts of a given scenario under various hydrologic conditions, + DSM2 planning studies are typically run under a 16-year sequence of + Delta inflows and exports derived from statewide water transfer and + storage simulations using CalSim-II. More information on CalSim-II + can be found on the web at https://water.ca.gov/Library/Modeling-and-Analysis/Central-Valley-models-and-tools/CalSim-II + . Planning simulations can use historical or astronomical tidal + data which incorporate influences of the spring-neap tidal cycle, or + simulations can use an average repeating tide (typically the 19-year + mean tide). Planning simulations typically assess impacts of + proposed changes to Delta operations or configuration such as + modified reservoir releases or dredging of channels. Planning studies + may also investigate impacts of hypothesized changes in the natural + environment such as sea level rise.
    +Historical
    + Replicate historical conditions*Forecasting*
    + Project conditions for the near future*Planning*
    + Hypothetical Delta changes*DSM2*
    +Modes of Operation

    +

    Figure 2: DSM2 Modes of Operation
    +Table 1: Parameter Descriptions for Three Modes of DSM2 +Application

    +

    ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Simulation +Parameter

    Replicate Historic +Conditions

    Forecasting Future +Conditions

    Planning Studies for Hypothetical +Conditions

    Boundary Tide

    Historic or astronomical tide

    Historic and projected astronomical forecast +tide

    Historic, astronomical

    Input Data

    Historic inflows and exports
    +Average Delta consumptive use

    Recent and current inflows and exports
    +Average Delta consumptive use

    CalSim-II statewide operations studies +provide inflows and exports
    +Average Delta consumptive use

    Simulation Period

    1990-2001 are currently possible

    1-10 weeks into the future

    1976-1991 sequence from CalSim-II statewide +operations studies

    +
  • +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/Delta_Tutorial_8_-_Temperature_Simulation/index.html b/tutorials/Delta_Tutorial_8_-_Temperature_Simulation/index.html new file mode 100644 index 00000000..ed3dfbe7 --- /dev/null +++ b/tutorials/Delta_Tutorial_8_-_Temperature_Simulation/index.html @@ -0,0 +1,512 @@ + + + + + + + + + + + + + + + + + + Delta Tutorial 8 - Temperature Simulation - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Delta Tutorial 8 - Temperature Simulation

+

DSM2 can be used to simulate water temperature and transport of this +property. Water temperature is also influenced by suspended particles and +biological matter in the water; temperature simulation is provided as a +module in DSM2. 

+
    +
  • Hari, could you help outline the steps for a tutorial in temperature + simulation?
  • +
+

Step-by-step guide

+
    +
  1. +
  2. +
+ + + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/Delta_Tutorial_9_-_DO_Simulation/index.html b/tutorials/Delta_Tutorial_9_-_DO_Simulation/index.html new file mode 100644 index 00000000..195f1901 --- /dev/null +++ b/tutorials/Delta_Tutorial_9_-_DO_Simulation/index.html @@ -0,0 +1,508 @@ + + + + + + + + + + + + + + + + + + Delta Tutorial 9 - DO Simulation - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Delta Tutorial 9 - DO Simulation

+

DSM2 can be used to simulate dissolved oxygen levels in the water. This +tutorial shows how to setup the input, run and retrieve the output from +the model simulation. 

+
    +
  • Hari, could you help me outline the steps involved in doing a DO + simulation?
  • +
+

Step-by-step guide

+
    +
  1. +
  2. +
+ + + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/Tutorial_1_Channels/index.html b/tutorials/Tutorial_1_Channels/index.html new file mode 100644 index 00000000..9ab3753e --- /dev/null +++ b/tutorials/Tutorial_1_Channels/index.html @@ -0,0 +1,1172 @@ + + + + + + + + + + + + + + + + + + Tutorial 1: Channels - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Tutorial 1: Channels

+

Task
+Run DSM2 for a steady boundary condition flow and salinity +(EC-electrical conductivity) simulation for a simple straight channel +grid 
+Skills Gained

+
    +
  • +

    Get started with DSM2

    +
  • +
  • +

    Creating channels

    +
  • +
  • +

    Establishing initial and boundary conditions

    +

    The purpose of this tutorial is twofold: to get a start with the +DSM2 model and to get practice setting up channels. We will set up a +simple channel-only grid with simple constant boundary conditions +and run both HYDRO and QUAL. We will look at two formats for +entering cross-section geometry (the new DSM2 single file format and +CSDP [Cross Section Development Program] format) and we will +familiarize ourselves with the echo output file that gives you a +single-file complete record of all the input data used in a DSM2 +module. 
    +For the tutorial, the channels have the following configuration and +specifications:
    +Alt text /> +Alt text

    +

    Figure 1 - Simple channel configuration and specifications. 
    +Note that there are two cross-section geometries labeled A and B +which will be specified later in this tutorial. In all the channels +except Channel 5 the cross sections have been assigned at the +midpoint of the channel. In Channel 5 the cross-sections are +assigned at fractions 0.2 and 0.8 of the length of the channel +measured from the upstream end. The DSM2 grid map includes arrows +pointing from upstream to downstream, indicating the positive direction of flow.

    +

    + + + + + +
    +

    Overview of DSM2 Channel Cross +Sections
    +DSM2 assumes a piecewise linear cross-sectional bathymetry. Width, area +and wetted perimeter are tabulated according to elevation. Each +elevation lists the data (width) or cumulative data (wetted perimeter +and area) below the given elevation. Anything above the top elevation is +extrapolated using a slope given by a global scalar +called levee_slope.
    +
    + +Figure 2: Piecewise linear bathymetry
    +For instance, for a cross section half way downstream in a fictitious +channel 123 the five layers of a cross-section with elevations given by +Figure 2, might be tabulated:

    +
    +
    +
    XSECT_LAYER
    +CHAN_NO DIST ELEV AREA WIDTH WET_PERIM
    +123 0.5 -14.6 0.0 0.0 0.0
    +123 0.5 -9.2 216.0 80.0 102.5
    +123 0.5 -4.0 736.0 120.0 111.0
    +123 0.5 9.5 2410.0 160.0 142.3
    +123 0.5 12.0 3028.5 162.0 148.0
    +
    +
    +

    The above table is in the single-file DSM2 cross-section format. An +analogous table is produced by the Cross Section Development Program +(CSDP). We will practice using both in the tutorial. The +parameter levee_slope is seldom changed from its standard value +of 0.33.

    +

    +

    The following steps will instruct you on how to create the channels, +give them very simple boundary conditions and run the model.

    +
  • +
  • +

    Open the hydro input file and add parameters:

    +
      +
    1. For this tutorial, you will want to use Notepad++ + (recommended https://notepad-plus-plus.org/), Textpad + or Emacs – some text editor that will not add special markup to + your input.
    2. +
    3. Navigate to + the ${DSM2_home}\tutorial\simple\t1_channels directory and + this directory will be referred to as the tutorial + directory below.
    4. +
    5. Open the hydro.inp file using one of the text editors + recommended in 1a.
    6. +
    +
  • +
  • +

    In HYDRO, add the Scalar Runtime information:

    +
      +
    1. +

      DSM2 input files use a keyword based table structure. Each table + begins with a keyword on the first line and column headings + (called field headers) on the second line. There are as many + lines of data as needed in the middle of the table, and the + table closes with an "END" line and a carriage return.

      +
    2. +
    3. +

      In the hydro.inp file, locate the SCALAR table. Scalars are + name-value pairs that control the model or define constants and + runtime parameters. Some scalar parameters are already defined + in the sample file.

      +
    4. +
    5. +

      Add the following run date, run time and temporary directory + scalars at the top of the SCALAR table and save. 

      +

      + + + + + +
      +
      + +++ + + + + + +
      + +Spaces or tabs can be used between values +
      +
      +

      +
      SCALAR
      +NAME VALUE
      +run_start_date 01JAN1992 #scalars to be added
      +run_end_date 01MAR1992   #scalars to be added
      +run_start_time 0000      #scalars to be added
      +run_end_time 0000        #scalars to be added
      +temp_dir c:/temp
      +title "TUTORIAL SIMULATION ${DSM2MODIFIER}" # [other scalars already included in the file]
      +warn_unchecked false
      +END
      +
      +

      Note that temp_dir should be set to a location with ample disk +space for production runs. This is a scratch directory where +DSM2 stores cached results.

      +
    6. +
    +
  • +
  • +

    *In HYDRO, add Channel information:
    +
    *

    +

    Next we will add a table of channels, including connectivity, and +conveyance/dispersion parameters. We are also going to add the +cross-section geometry using the XSECT_LAYER section, which is +introduced in Version 8. (CSDP-styled input is discussed later).

    +
      +
    1. +

      The CHANNEL table requires: a channel number, length, + Manning's n, dispersion coefficient, node number to identify the + upstream end and node number at the downstream end. Type the + table and field headers for the CHANNEL table at the bottom of + the hydro.inp file:

      +
      CHANNEL
      +CHAN_NO LENGTH MANNING DISPERSION UPNODE DOWNNODE
      +
      +
    2. +
    3. +

      Open the file channel_tutorial_starter.txt and copy the data + for channels 1-6 and channel 2004 from the CHANNEL table of the + tutorial data file and paste it into the newly created CHANNEL + table in your hydro.inp file.

      +
    4. +
    5. +

      Type END after the last row to end the table.

      +
    6. +
    7. +

      Now create the XSECT_LAYER, table which will contain one row for + every vertical layer in every user-defined cross-section. This + table is new in Version 8, and is intended to allow input to be + represented in a single file and using a single input style – + making archives and comparisons simpler. Below the CHANNEL + table, create the skeleton for the XSECT_LAYER table:

      +
      XSECT_LAYER
      +CHAN_NO DIST ELEV AREA WIDTH WET_PERIM
      +[data will go here]
      +END
      +
      +

      + + + + + +
      + +Typically in DSM2 input files, the order of the +tables is not important. However, when one table refers to information +defined in another table, the "parent" table with the definition +typically appears first in the input file. In this case the CHANNEL +table must be before the XSECT_LAYER table. +

      +
    8. +
    9. +

      In the first row, we will start defining a cross-section for + channel #1. We will be entering three rows for Channel 1, each + of which will have a "1" in the CHAN_NO column. The data will be + located midway downstream along the channel, so in the Distance + (fraction) field, type 0.5. The three rows of data are given + below

      +
      XSECT_LAYER
      +CHAN_NO DIST ELEV AREA WIDTH WET_PERIM
      +1 0.5 -24.0 0.0 40.0 40.0
      +1 0.5 0.0 960.0 80.0 102.5
      +1 0.5 20.0 2640.0 160.0 192.0
      +
      +
    10. +
    11. +

      Copy and paste the three rows of data for Channel 1 three times + for Channels 2, 3 and 2004 and change the channel number. Note + that changing the channel number to 2004 will shift the data so + that it no longer lines up with rows above it. DSM2 reads the + values in order and doesn't care about the spacing, but you can + adjust the spacing for aesthetic reasons if you want and later + we will encounter dsm2_tidy a utility for tidying up the tables + automatically. Copy the three data lines one more time for + Channel 5, this time changing the Channel number to 5 and the + distance to 0.2.

      +
    12. +
    13. +

      There is an additional cross-section given for Channel 5, + cross-section "B". The cross section is located in Channel 5, + 0.8 of the way from the upstream end to the downstream end as + indicated on the schematic at the beginning of the tutorial. + Enter the cross section as shown below.

      +
      XSECT_LAYER
      +CHAN_NO DIST ELEV AREA WIDTH WET_PERIM
      +5 0.8 -20.0 0.0 60.0 60.0
      +5 0.8 -4. 1120.0 80.0 97.74
      +5 0.8 2.0 1660.0 100.0 121.06
      +5 0.8 10.0 2700.0 160.0 183.16
      +
      +
    14. +
    15. +

      Copy the cross section data from Channel 5 Distance 0.8 to use + it for Channel 6, but change the Distance to 0.5.

      +

       Make sure the table is terminated with an END line with a carriage + return and save your file. 

      +
    16. +
    +
  • +
  • +

    In HYDRO, set the Boundary information:
    +

    +

    In this section we are going assign very simple boundary conditions +to the upper and lower ends of the channel system. 
    + Note that if you do not set boundary conditions at the end of a +channel, a "no-flow" boundary (Q=0.0) is assumed.

    +
      +
    1. +

      The upstream boundary will be a constant inflow.

      +
    2. +
    3. +

      In hydro.inp, enter an input table for the inflow:

      +
      BOUNDARY_FLOW
      +NAME NODE SIGN FILLIN FILE PATH
      +upstream_flow 1 1 last constant 200.
      +END
      +
      +

      This line assigns a constant inflow of 200.0 cfs to the upstream +boundary. The NAME column will be used 1) to associate quality +inputs with inflows and 2) for prioritizing data in multiple +input files. The NODE field assigns the input to Node #1. The +FILLIN field is an instruction to the model as to how to +interpolate data in time, which is not relevant for a constant +value.
      + DSM2 assumes consistent units and typically simulates flows in +cfs.

      +
    4. +
    5. +

      Start an input table for the downstream stage boundary: The + headers FILE and PATH are more intuitive for time varying + boundary conditions where a file name and a file location (path) + are specified for a file that contains the time varying + information. For a constant boundary condition FILE is set to + "constant" and PATH is set to the boundary condition value

      +
    6. +
    7. +

      The downstream boundary will be a constant water surface (stage) + boundary.

      +
      BOUNDARY_STAGE
      +NAME NODE FILLIN FILE PATH
      +[data go here]
      +END
      +
      +
    8. +
    9. +

      In the BOUNDARY_STAGE table, enter the following values into the + appropriate fields and save:

      +
    10. +
    +

     Although spaces or tabs can be used, columns with spaces tend to +look better when opened in a different viewer. You can use the +dsm2_tidy utility to clean up columns and spaces. Type dsm2_tidy +--help at a command prompt for more info.

    +
      +
    1. +
        +
      1. +
          +
        1. +

          Input Name: downstream_stage

          +
        2. +
        3. +

          Node: 7

          +
        4. +
        5. +

          Fillin: Last

          +
        6. +
        7. +

          Input File: constant

          +
        8. +
        9. +

          Path/Value: 0.0

          +
        10. +
        11. +

          END the table and save the file.

          +
        12. +
        +
      2. +
      +
    2. +
    +
  • +
  • +

    *In HYDRO, set the Initial Conditions for stage and flow:
    +
    *

    +

    A default hydrodynamic initial condition is required for every +channel in DSM2.
    +The initial condition can be replaced using a restart file, but the +default must still be entered now. For each of the channels, the +stage and flow will be set to 0. These 0-values will be applied +at both the 0 and length (distance to downstream end of channel) +distances along the channel. With six channels, and two locations to +set the values, there will be a total of 12 rows.

    +
      +
    1. +
        +
      1. In the hydro.inp file, start the initial condition table:
      2. +
      +

      CHANNEL_IC
      +CHAN_NO DISTANCE STAGE FLOW
      +1 0 0.0 0.0
      +1 length 0.0 0.0
      +[further data will go here]
      +END

      +
    2. +
    +

    Copy the two lines of data and paste them into the input file for +all of the channels. Refer back to Figure 1 for the channel numbers.

    +
    +
    +
  • +
  • +

    **In HYDRO, Specify the Output Locations:
    +
    ****

    +

    Lastly, we specify the output locations. For this tutorial, we will +request flow and stage at the two boundaries, two locations along +Channel 2, and the beginning of Channel 2004. These choices will be +used to illustrate some points in a later tutorial when we look at +Layering. Feel free to add anything that interests you.

    +
      +
    1. +
        +
      1. In hydro.inp, create the skeleton OUTPUT_CHANNEL table + using the following header:
      2. +
      +

      OUTPUT_CHANNEL
      +NAME CHAN_NO DISTANCE VARIABLE INTERVAL PERIOD_OP FILE
      +[data will go here]
      +END

      +
    2. +
    3. +
        +
      1. The output request rows may be found in the file + output_channel_tutorial.inp. Copy them into hydro.inp.
      2. +
      3. Save and close the hydro.inp file.
      4. +
      +
    4. +
    +
    +
    +
  • +
  • +

    In QUAL, add the Scalar Runtime information:
    +

    +
      +
    1. +
        +
      1. The file qual.inp already has a SCALAR section. Add the + following run time and temporary directory SCALARS above the + others:
      2. +
      +

      SCALAR
      +NAME VALUE
      +run_start_date 02JAN1992
      +run_end_date 01MAR1992
      +run_start_time 0000
      +run_end_time 0000
      +temp_dir c:/temp
      +[Existing scalars]
      +END

      +
    2. +
    +
  • +
  • +

    *In QUAL, set the Boundary Concentration information:
    +
    *

    +
      +
    1. +
        +
      1. Boundary conditions in QUAL for the constituent ec are + specified in the NODE_CONCENTRATION table:
      2. +
      +

      NODE_CONCENTRATION
      +NAME NODE_NO VARIABLE FILLIN FILE PATH
      +END

      +
    2. +
    +

    The names of the inputs must be EXACTLY the same as given in hydro – +this is how input concentrations are matched with input flows.

    +
      +
    1. +
        +
      1. +

        In the Node Concentration table, add an upstream + concentration row. The name for this boundary condition must + match the corresponding boundary in hydro – this + name-matching is how flows and concentrations are paired. + See section 2.b for the NAME used in this tutorial and + Figure 1 for the node numbers. In the new row, enter the + following information into the appropriate fields: + 1. 1. Input Name: upstream_flow + 2. Node: 1 + 3. Variable: ec + 4. Fillin: last + 5. Input File: constant + 6. Path/Value: 200. (The period after the value is to indicate it is not + an integer.)

        +

         DSM2 does not care what units are used for constituent + concentrations, but all concentrations must be in the same units. + For ec, uS/cm are typically used.

        +
      2. +
      +
    2. +
    3. +
        +
      1. In the Node_Concentration table in qual.inp, add a + downstream boundary concentration row. The downstream + concentration is going to be higher than the upstream one + since we are going to turn this into a tidal boundary + in a later tutorial. Enter the following information into + the next row of the table: + 1. 1. Input Name: downstream_stage + 2. Node: 7 + 3. Variable: ec + 4. Fillin: last + 5. Input File: constant + 6. Path/Value: 30000
      2. +
      3. Save the current settings.
      4. +
      +
    4. +
    +
  • +
  • +

    *In QUAL, Specify Output Locations:*
    +

    +

    In QUAL, you can request 
    +1) concentration data, 2) concentration data with source tracking +or 3) flow and stage data (which can be confusing if not output at +the model time step). In this tutorial, our requests will +include ec at the two boundaries, two locations along Channel 2, +and the beginning of Channel 2004.

    +
      +
    1. +
        +
      1. In qual.inp, create a QUAL Output table:
      2. +
      3. In the OUTPUT_CHANNEL table, add the following lines:
      4. +
      +

      OUTPUT_CHANNEL
      +NAME CHAN_NO DISTANCE VARIABLE INTERVAL PERIOD_OP FILE
      +bnd_1 1 0 ec 15min inst ${QUALOUTDSSFILE}
      +bnd_6 6 length ec 15min inst ${QUALOUTDSSFILE}
      +chan2_half 2 7500 ec 15min inst ${QUALOUTDSSFILE}
      +chan2_length 2 length ec 15min inst ${QUALOUTDSSFILE}
      +chan2004 2004 0 ec 15min inst ${QUALOUTDSSFILE}
      +END

      +
    2. +
    3. +
        +
      1. Save and close the file.
      2. +
      +
    4. +
    +
  • +
  • +

    *Running HYDRO and QUAL*
    +

    +

    DSM2v8 runs hydro and qual sequentially. The hydrodynamic data from +the hydro run is an input to the qual simulation.

    +

     Hydro can be run without qual, but can qual be run without +hydro? 
    + The DSM2 tutorials assume that you have activated Microsoft's +PowerToy Open Command Window Here. To get this and other +recommended 3rd party extras for DSM2, go to +the Recommended Third Party Extras section of the DSM2 +documentation by clicking on the START menu and selecting START +MENU → Programs → DSM2_v8 → DSM2_documentation.
    +If you do not want to install the Open Command Window Here tool, +then you can use a command shell and change directories to the +indicated directory. To open a command shell, click on the START +menu and select Run. In the box type cmd if it does not come up +as the default. Click on OK.

    +
      +
    1. +
        +
      1. +

        In Windows Explorer, navigate to the directory:
        {DSM2_home}\tutorial\simple.

        +

        2. shift+Right-click on the directory, t1_channels, and +select Open Command Window Here.

        +
      2. +
      +
    2. +
    3. +
        +
      1. In the command window, type: hydro hydro.inp and press + enter. +
      2. +
      +
    4. +
    +

    Note that several lines will appear in the command window very +quickly. There may then be a delay while data is processed. Then +"Starting hydro computations for time X" will appear. A successful +model run is completed after a "Normal program end" statement and +the command prompt returns.

    +
      +
    1. +
        +
      1. HYDRO will then run (it may take a few minutes) and create + an output.dss file in the same directory.
      2. +
      3. To run QUAL, in the command window, type: qual qual.inp.
      4. +
      5. QUAL will then run and add output to the output.dss file. + A successful qual run will produce a "Normal program end" + statement and return to the command prompt. Qual takes + longer to run than hydro did.
      6. +
      7. Open the output.dss file and examine the results.
      8. +
      +
    2. +
    +
  • +
  • +

    *CSDP style cross-sections*
    +

    +

    You can also run the model using cross-sections in the CSDP format. +This is the form most familiar to DSM2 users. Mixing CSDP format +with other formats may produce unpredictable results.
    + Two caveats. First, there are no rectangular cross-sections in +Version 8. The rectangular and irregular cross-sections in Version 6 +were not consistent: a regular cross-section and its equivalent +representation in the irregular format did not give the same result. +The discrepancy was due to different interpolation rules. In Version +8, we have dropped the "irregular" nomenclature because this is the +only kind of cross section we support. The practical consequence of +the change is that you are going to need a cross-section for every +channel, and to get this you will need a data set targeted at +Version 8. The Version 8 cross sections for the Delta are provided +in the advanced tutorials.
    +In the tutorial, you will find that the CSDP version of the cross +sections are represented in two files: xsect_a.txt and xsect_b.txt. +Recall that earlier in the tutorial the single file format cross +sections were specified in the hydro.inp file. Now we will create +a new launch file called hydro_csdp.inp that is going to reference +the text files instead of listing the data explicitly.

    +
      +
    1. +
        +
      1. Copy hydro.inp to hydro_csdp.inp — it doesn't matter what + you name the file, but don't skip this step.
      2. +
      3. In hydro_csdp.inp, erase the XSECT_LAYER table and replace + it with the following XSECT table that will point to the + cross-section files.
      4. +
      +

      XSECT +CHAN_NO DIST FILE +1 0.5 xsect_a.txt +[other xsects go here] +END

      +
    2. +
    +

    When running DSM2v8, use either the Version 8 format (XSECT_LAYER +from section 3 in this tutorial) or use the CSDP format presented in +this part of the tutorial. DO NOT MIX AND MATCH IN ONE FILE.

    +
      +
    1. +
        +
      1. Create the table using the same channel-distance + combinations as we used before. Use cross-sections A and B + as designated in Figure 1.
      2. +
      3. In the IO_FILE table, change the name of the echoed output + file to hydro_echo_csdp.inp. As a bonus exercise you could + change the environmental variables to accomplish nearly the + same thing.
      4. +
      +
    2. +
    +
  • +
  • +

    *Rerun HYDRO and compare cross-section formats*
    +

    +

    Now we want to run hydro with the alternate input from CSDP. To +verify that we get the same cross-sections using the CSDP format, we +are going to scrutinize the echo input file.

    +
      +
    1. +
        +
      1. Open the echoed input file from your first run. The file + name is channel_hydro_echo.inp. Do a search for + XSECT_LAYER. This file echoes the input used on your + previous run, and is what we are trying to match.
      2. +
      3. Rerun hydro using the command:
      4. +
      +
    2. +
    +
    +

    hydro hydro_csdp.inp

    +
    +
      +
    1. +
        +
      1. Compare the echoed cross-sections to those + in hydro_echo_csdp.inp. Use your text editor or a "diff" + tool.
      2. +
      +
    2. +
    +
  • +
  • +

    *Run HYDRO using echoed input.*
    +

    +

    Finally, let's take a look at the echoed output file and verify that +it is an exact one-file replica of the *.inp data that went into +the run. This is a powerful archiving option.

    +
      +
    1. +
        +
      1. Rerun hydro using hydro.inp.
      2. +
      3. Open channel_hydro_echo.inp.
      4. +
      5. Locate the IO_FILE section and change the name of the echoed + input file (first entry) to echo_echo.inp.
      6. +
      7. Save and close channel_hydro_echo.inp
      8. +
      9. Run the model using channel_hydro_echo.inp. At a command + prompt type:
      10. +
      +
    2. +
    +
    +

    hydro channel_hydro_echo.inp

    +
    +
      +
    1. +

      Compare the output from your first run + (channel_hydro_echo.inp) to the second run (echo_echo.inp). + Are they the same?

      +
      +
    2. +
    +
  • +
+
+

+
    +
  1. Brain teasers
      +
    1. What is the actual delta-x between computational points for each + of the subreaches (channels 1-6)?
    2. +
    3. (Advanced – for hydrodynamics people) Why is the requested dx + the minimum spatial step for each reach? Isn't finer better? + Wouldn't you want to impose a maximum on how big dx can be?
    4. +
    5. Change the bottom elevation of one of the cross-sections in the + tutorial by lowering it 5ft. Do not alter the other vertical + layers in the cross-section. For a typical water surface you + will not be altering the properties of the cross-section. Can + you think of two ways you are changing the simulation? Are + they both "real"? What are the implications for representing a + dredged channel in a study?
    6. +
    +
  2. +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/Tutorial_2_Reservoirs_Gates_Transfers/index.html b/tutorials/Tutorial_2_Reservoirs_Gates_Transfers/index.html new file mode 100644 index 00000000..5502255f --- /dev/null +++ b/tutorials/Tutorial_2_Reservoirs_Gates_Transfers/index.html @@ -0,0 +1,787 @@ + + + + + + + + + + + + + + + + + + Tutorial 2: Reservoirs, Gates, Transfers - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Tutorial 2: Reservoirs, Gates, Transfers

+

Task
+Add reservoirs, gates and object to object flow transfers to the simple +channel grid created in tutorial 1 
+Skills Gained

+
    +
  • Understanding of how reservoirs and gates are represented in DSM2
  • +
  • +

    Learn how to transfer flow from one reservoir or node to another + reservoir or node in DSM2
    + The purpose of this tutorial is to learn how to add reservoirs, + gates, and flow transfers to the simple channel-only grid created in + Tutorial 1 (Figure 1). The grid we are going to create has the + following configuration and specifications: The channel portion is + identical to the simple channel model from Tutorial 1. Note that + each tutorial is self contained, so it is not necessary to do + Tutorial 1 before completing this tutorial.
    + + Figure 1- Simple channel with a new reservoir, gate, and flow + transfer. 
    + The following steps will instruct you on how to create these new + features and add them to the simple channel system. 

    +

    DSM2 Definitions 
    +Reservoir
    +In DSM2, reservoirs are open bodies of water that store flow and are +connected to nodes by means of an energy-based equation. This means +that flow moves between the reservoir and its connected node or +channel whenever there is an energy imbalance (e.g. stage +difference). Reservoirs are considered instantly well-mixed. The +Reservoirs Table specifies the identity and physical properties of +the reservoir. Connections to nodes are specified in the Reservoir +Connections table. If it is desired to regulate flow between a +reservoir and its connected node or channel, a gate device is +used. 
    +In DSM2 applications for the Delta, reservoirs are used for actual +reservoirs such as Clifton Court Forebay and for open water bodies +such as flooded islands. 
    +Gate
    +In DSM2, gates are sites that present a barrier or control on flow. +A gate may have an arbitrary number of associated hydraulic devices +(pipes and weirs), each of which may be operated independently to +control flow. 
    +In DSM2 applications for the Delta, gates are used to represent the +Delta Cross Channel, the Montezuma Slough Salinity Control Gates, +and permanent or temporary barriers. 
    +Object to Object Flow Transfer
    +Transfers are direct water connections from a reservoir or node to +another reservoir or node. Transfers are instantaneous movements of +water (and its constituents and particles) without any detailed +description of physics or storage. The Transfer table specifies the +connectivity of the transfer. 
    +In DSM2 applications for the Delta, object to object transfers have +been used to represent proposed peripheral canal withdrawal and +outflow locations. 

    +
  • +
  • +

    Create the reservoir:

    +
      +
    1. In Windows Explorer, navigate to the + directory: {DSM2_home}\tutorial\simple\t2_reservoir_gate_transfer.
    2. +
    3. Open hydro.inp. At the bottom of the file, add the skeleton + for the reservoir table:
    4. +
    +
  • +
+

RESERVOIR
+NAME AREA BOT_ELEV
+END

+
    +
  1. +
      +
    1. Enter the following values into the appropriate fields:
      + 1. Name: res_1
      + 2. Area (million sq ft): 40
      + 3. Bottom elev (ft): -24
    2. +
    3. Note from Figure 1 that the reservoir has two connections; one + at Node 3, and one at Node 4. These will go in a child table + called RESERVOIR_CONNECTION. Some DSM2 input data tables are + related to each other in what is referred to as a parent/child + relationship. In the case of reservoirs, the RESERVOIR table is + the parent table and the RESERVOIR_CONNECTIONS table is the + child table that provides additional information related to the + information in the parent table. The parent table must appear in + the input file prior to the child table. The header has the + following form:
    4. +
    +
  2. +
+

RESERVOIR_CONNECTION
+RES_NAME NODE COEF_IN COEF_OUT
+END

+
    +
  1. +
      +
    1. Enter the following values into the appropriate fields for the + first connection:
      + 1. Res Name: res_1
      + 2. Node: 3
      + 3. Res Coef (in): 200
      + 4. Res Coef (out): 200
    2. +
    3. Enter the following values into the appropriate fields for the + second connection:
        +
      1. Res Name: res_1
      2. +
      3. Node: 4
      4. +
      5. Res Coef (in): 200
      6. +
      7. Res Coef (out): 200
      8. +
      +
    4. +
    5. Save the current settings.
    6. +
    +
  2. +
+

 To ensure conservation of mass at the beginning of a DSM2 simulation, +it is good practice to set appropriate initial conditions. It is +recommended to set all flows to zero and reservoir stage to zero.

+
    +
  1. Add Initial Conditions for the Reservoir:
      +
    1. Create the Reservoir Initial Conditions table:
        +
      1. The header and data are
      2. +
      +
    2. +
    +
  2. +
+

RESERVOIR_IC
+RES_NAME STAGE
+res_1 0.0
+END 

+
    +
  1. Create the Gate:
      +
    1. Now we are going to create the GATE table and its child table + GATE_DEVICE. Note from Figure 1 that the gate is located at Node + 2 of Channel 2. This gate consists of both a weir and a pipe. + Therefore, two rows of information will be needed for + the GATE_DEVICE table.
    2. +
    3. At the bottom of hydro.inp, add the skeleton for the GATE table:
    4. +
    +
  2. +
+

GATE
+NAME FROM_OBJ FROM_IDENTIFIER TO_NODE
+END

+
    +
  1. +
      +
    1. In the Gates table:
      + 1. Add a row and enter the following values into the + appropriate fields:
      + 1. Name: gate_1
      + 2. From object: channel
      + 3. From identifier: 2 [note that this 2 refers to channel + 2]
      + 4. to Node: 2 [note that this 2 refers to node 2]
      + 2. Create a GATE_WEIR_DEVICE table with the following fields:
    2. +
    +
  2. +
+

GATE_NAME, DEVICE, NDUPLICATE, WIDTH, ELEV, HEIGHT, CF_FROM_NODE, +CF_TO_NODE, DEFAULT_OP

+
    +
  1. +

    Enter the following values into the appropriate fields:

    +
      +
    1. Gate Name: gate_1
    2. +
    3. Device: weir
    4. +
    5. NDuplicate: 2
    6. +
    7. Width: 20
    8. +
    9. Elev: 2
    10. +
    11. Height: 9999.0
    12. +
    13. CF from Node: 0.8
    14. +
    15. CF to Node: 0.8
    16. +
    17. Default Op: gate_open. Note: don't forget to close your + table with END. How many weirs does this gate have?
      + Hint: check out the value for number of duplicates
    18. +
    +
  2. +
  3. +

    Create a GATE_PIPE_DEVICE table by looking up the + appropriate headers in the DSM2 documentation. All + table headers have to be in capital letters.

    +
      +
    1. +

      Again, in the Gate Devices table:

      +
        +
      1. On a new line enter the following values into the + appropriate fields:
          +
        1. Gate Name: gate_1
        2. +
        3. Device Name: pipe
        4. +
        5. Number of duplicates: 2
        6. +
        7. Radius: 2
        8. +
        9. Elevation: 2
        10. +
        11. Flow coefficient from Node: 0.8
        12. +
        13. Flow coefficient to Node: 0.8
        14. +
        15. Default Operation: gate_open
        16. +
        +
      2. +
      +
    2. +
    3. +

      Save the current settings.   How would you change the gate device + table to only allow flow in one direction? Hint: review gate + operation options in the documentation.

      +
    4. +
    +
  4. +
  5. +

    Create the Transfer:

    +
  6. +
+

A transfer is a momentum-free transfer of water from one node or +reservoir to another node or reservoir. We are going to create a +continuous transfer of 40cfs of water from the reservoir res_1 to node +6.

+
    +
  1. Below the gate input, create the TRANSFER table
      +
    1. The headers are:
    2. +
    +
  2. +
+

TRANSFER
+NAME FROM_OBJ FROM_IDENTIFIER TO_OBJ TO_IDENTIFIER
+END

+
    +
  1. +

    Enter the following values into the appropriate fields:
    + 1. Name: transfer_1
    + 2. From Object: reservoir
    + 3. From identifier: res_1
    + 4. To Object: node
    + 5. To identifier: 6

    +
      +
    1. Save the current settings.
    2. +
    +
  2. +
  3. +

    Add the Transfer Flow Time Series:

    +
  4. +
+

We have created the transfer physically, but we have not assigned it a +flow. This is done on a separate table, so that the specifications of +the transfer can be used with different operations or hydrologies. Flow +will be 40cfs.

+
    +
  1. In hydro.inp, create the Transfer Time Series table:
      +
    1. The headers are:
    2. +
    +
  2. +
+

INPUT_TRANSFER_FLOW
+TRANSFER_NAME FILLIN FILE PATH
+END

+
    +
  1. +

    Enter the following values into the appropriate fields:
    + 1. Input Name: transfer_1
    + 2. Fillin: last
    + 3. Input File: constant
    + 4. Path/Value: 40

    +
      +
    1. Save the current settings.
    2. +
    +
  2. +
+

+ How would you change the flow transfer from a constant value to a time +varying value?
+ +Note: the values shown in the last two columns are descriptions of the +information that would go in that field; they are not actual field +values. See Basic Tutorial 4 for more information on using time series +data in DSM2. 

+
    +
  1. +

    Running HYDRO and QUAL

    +
      +
    1. +

      In Windows Explorer, navigate to the directory:
      {DSM2_home}\tutorial\simple.

      +

      + 2. Right-click on the directory, t2_reservoir_gate_transfer, and +select Open Command Window Here.
      + 3. In the command window, type: hydro hydro.inp.
      + 4. In the command window, type: qual qual.inp.
      + 5. Open the output.dss file in +the t2_reservoir_gate_transfer directory, and examine  the +results.

      +
    2. +
    +
  2. +
  3. +

    Brain teasers

    +
      +
    1. +

      The equation for inflow from a node to a reservoir through a + gate is as follows:   +

      +

      Lookup the equation for a reservoir +connection in the documentation. Write it down next to the gate +equation. Assuming they both represent the same basic orifice +physics, what terms in the gate equation does the reservoir +coefficient C lump together?
      + 2. Clifton Court Forebay has five duplicate radial gates connecting +it to the surrounding channel. Each has a crest elevation of +-10.1ft and a width of 20ft:
      +1. If water is at 0 ft and the five gates are open, what is the + area exposed to flow?
      +2. If the weirs are perfectly efficient (no loss, coefficients + of 1.0), what would be the equivalent "lumped" reservoir + coefficient for these gates?
      +3. DSM2 version 6 had a calibrated reservoir coefficient of + 1800. (Note that the value of 1800 is the DSM2v6 value of + 2400 adjusted so that it matches the reservoir equation and + v8. In version 6 the coefficient was multiplied by an + undocumented 0.75 factor. DSM2v8 uses the reservoir + coefficient as specified.) Was this value physical given the + assumptions of the model? What alternate value might you + use? Why might the version 6 value have been acceptable – + are there explanations having to do with the model + assumptions?

      +
    2. +
    +
  4. +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/Tutorial_3_Layering/index.html b/tutorials/Tutorial_3_Layering/index.html new file mode 100644 index 00000000..6c122e77 --- /dev/null +++ b/tutorials/Tutorial_3_Layering/index.html @@ -0,0 +1,1203 @@ + + + + + + + + + + + + + + + + + + Tutorial 3: Layering - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Tutorial 3: Layering

+

Task

+
    +
  • Separate DSM2 input data into multiple input files
  • +
  • Use layers in DSM2 to group related items
  • +
+

Skills Gained 
+Learn how to use layering in DSM2 to add, change and delete features in +a DSM2 simulation, for example including a new reservoir in a simulation

+

The purpose of this tutorial is to demonstrate the use of layering to +structure your project. Layers are part of the DSM2 data management +system. They allow input items to be grouped in logical bundles, and +allow changes to be brought into an old simulation without erasing or +altering archived items. At the same time we will neaten up our input by +dividing it into several files that are "included" from a fairly sparse +primary file. The layering concept will be demonstrated by adding a +"dummy" reservoir connected to nodes 5 and 6 (Figure 1) that will be +"turned on" or "turned off" in a simulation. We will also use +DSM2MODIFIER to differentiate between alternative simulations.

+

Alt text +Figure 1: Simple channel with a reservoir, gate, flow transfer and +dummy reservoir.

+
    +
  1. Convert the previous hydro.inp GRID items to external files
  2. +
+

In order to use layers, the input tables have to be gathered into +individual input files. 
+ Key points about layering:

+
    +
  • Each file represents a layer
  • +
  • Information in the launch file (hydro.inp or qual.inp) + supersedes all other input information.
  • +
  • For include blocks, files that are read later replace files that + are read earlier, in other words, if the same type of input + information exists in more than one file, the last information read + will overwrite the previously read values.
  • +
  • Overriding values is based on an identifier (e.g.NAME or NAME and + VARIABLE—identifiers are listed in table reference documentation)
  • +
  • Parent and child tables (e.g. channel and xsect) must be grouped in + the same file.
  • +
  • If a parent item is overridden, all of the child items associated + with the overridden parent item are ignored.
      +
    1. Move the channel and reservoir data:
        +
      1. Navigate to the t3_layering directory.
      2. +
      3. Create a new file in Notepad++ + called grid_tutorial_base.inp
      4. +
      5. Open hydro.inp.
      6. +
      7. Locate the CHANNEL and XSECT_LAYER tables + in hydro.inp. Cut them and paste them + into grid_tutorial_base.inp.
      8. +
      9. Locate the RESERVOIR and RESERVOIR_CONNECTION tables + in hydro.inp. Cut them and paste them + into grid_tutorial_base.inp and save the file. Note: leave + the RESERVOIR_IC in the hydro.inp file.
      10. +
      11. Similarly move the TRANSFER and GATE information + from hydro.inp to grid_tutorial_base.inp. Be sure to + move the GATE child tables too. Leave the + INPUT_TRANSFER_FLOW table in the hydro.inp file.
      12. +
      13. Make sure the data tables listed above have been removed + from hydro.inp.
      14. +
      15. Now add these lines to hydro.inp that will tell DSM2 you + want to include data from other files and that these files + will contain GRID (channel, reservoir, transfer and gate) + tables and their child tables. Add the GRID table after the + IO_FILE block and before any of the initial condition + blocks.
      16. +
      +
    2. +
    +
  • +
+

GRID
+grid_tutorial_base.inp
+END
+ Be sure that there is a carriage return at the end of each *.inp +file. 

+
    +
  1. Running HYDRO and QUAL with grid information in separate files
  2. +
+

This simulation will serve as the base case for comparison for the other +simulations run in this tutorial. We will use the DSM2MODIFIER to +differentiate between the various simulations. DSM2MODIFIER is a special +ENVVAR definition that is automatically used by DSM2 to mark output (the +F Part of the DSS Path).

+
    +
  1. +
      +
    1. +

      In the ENVVAR section of hydro.inp and qual.inp, change + DSM2MODIFIER to layers_base and save the files.

      +
    2. +
    3. +

      In Windows Explorer, navigate to the directory:
      + {DSM2_home}\tutorial\simple

      +

      .

      +
        +
      1. +
          +
        1. Right-click on the directory, t3_layering, and + select Open Command Window Here. Note: for computers + running Vista, use a shift+right click on the directory + name to get the Open Command Window.
        2. +
        3. In the command window, type: hydro hydro.inp.
        4. +
        5. In the command window, type: qual qual.inp.
        6. +
        7. Note that many of the output files use the DSM2MODIFIER + in their name, e.g. layers_base.out. + The output.dss file distinguishes between scenarios by + using the DSM2MODIFIER in the F-Part. Open + the output.dss file in the t3_layering directory, + and examine the results.
        8. +
        +
      2. +
      3. +

        Creating a new reservoir:

        +
      4. +
      +

      In this section, we will learn how to add a feature by adding a +new reservoir. We don't want to mess too much with what we have +already, so we are going to add a dummy reservoir in our +grid_tutorial_base layer. Later in this tutorial, we will learn +how to use layers to disable this feature as well. 
      + The ability to mask and delete features such as reservoirs and +gates in DSM2 is often used in planning runs to "turn on" and +"turn off" features when studying planning alternatives. 

      +
        +
      1. +
          +
        1. +
            +
          1. Create a new Reservoir in grid_tutorial_base + 1. In tutorial_grid_base.inp, enter data for the + new reservoir below the data for res_1 + 2. Name: dummy_res + 3. Area (million sq ft): 60 + 4. Bottom elev (ft): -30
          2. +
          +
        2. +
        3. +

          In the Reservoir Connection table:

          +
            +
          1. Enter the following values into the appropriate + fields:
              +
            1. Reservoir name: dummy_res
            2. +
            3. Node: 5
            4. +
            5. Res Coef (in): 220
            6. +
            7. Res Coef (out): 220
            8. +
            +
          2. +
          +
        4. +
        5. +

          Again, in the Reservoir Connection table:

          +
            +
          1. Enter the following values into the appropriate + fields:
              +
            1. Reservoir name: dummy_res
            2. +
            3. Node: 6
            4. +
            5. Res Coef (in): 220
            6. +
            7. Res Coef (out): 220
            8. +
            +
          2. +
          +
        6. +
        7. +

          Save the current settings.

          +
        8. +
        +
      2. +
      3. +

        Running HYDRO and QUAL with the new reservoir

        +
      4. +
      +

      This simulation is our first alternative which adds a reservoir. +We will use the DSM2MODIFIER to differentiate this simulation +from the base simulation.

      +
        +
      1. +
          +
        1. In the ENVVAR section of hydro.inp and qual.inp, + change DSM2MODIFIER to layers_dummyres and save the + files.
        2. +
        3. In Windows Explorer, navigate to the directory: _
        4. +
        +
      2. +
      +

      .

      +
    4. +
    5. +

      Right-click on the directory, t3_layering, and select Open + Command Window Here. For Vista shift+right click on directory + to get the Open Command Window.

      +
    6. +
    7. +

      In the command window, type: hydro hydro.inp.

      +
    8. +
    9. +

      In the command window, type: qual qual.inp.

      +
    10. +
    11. +

      Note that many of the output files use the DSM2MODIFIER in their + name, e.g. layers_dummyres.out. Compare + the layer_base.out and the layer_dummyres.out echoed input + files to make sure that the dummy reservoir was included in the + simulation. The output.dss file distinguishes between + scenarios by using the DSM2MODIFIER in the F-Part. Open + the output.dss file in the t3_layering directory, and look + for results from the base run and from the new dummyres + simulation.

      +
    12. +
    +
  2. +
  3. +

    Disabling a reservoir using a revision layer

    +
  4. +
+

In this step of the tutorial, we will disable (remove) the dummy +reservoir from the simulation using a revision layer. Revision layers +allow the user to add or remove features for alternatives without +altering the base input files.

+
    +
  1. +
      +
    1. Create a Reservoir Revision Layer: + 1. Create a file called grid_tutorial_revision.inp. Add this + file to your GRID include-file section in hydro.inp, which + will now look like this:
    2. +
    +
  2. +
+

GRID
+grid_tutorial_base.inp
+grid_tutorial_revision.inp
+END
+ The include files will be prioritized in the order they are read, later +files replacing earlier ones. In this example, the information +in grid_tutorial_revision.inp has priority +over grid_tutorial_base.inp, thus any duplicate information +in grid_tutorial_revision.inp will override the information +in grid_tutorial_base.inp. When a parent table identifier (usually a +channel/node number or a "name") is overridden by a later file, its +original data (including child tables) will be ignored. Everything will +come from the higher priority layer.

+
    +
  1. +
      +
    1. +
        +
      1. Copy the reservoir table header and dummy reservoir data + from grid_tutorial_base.inp to grid_tutorial_revision.inp. + It is important to copy both the parent (RESERVOIR) and the + child (RESERVOIR_CONNECTION) tables into the revision layer. + 2. Add a carat (^ shift and 6 key) before the reservoir name in + the parent table. Your entry should look like this:
      2. +
      +
    2. +
    +
  2. +
+

RESERVOIR
+NAME AREA BOT_ELEV
+^dummy_res 60.0 -30.0
+END 
+RESERVOIR_CONNECTION
+RES_NAME NODE COEF_IN COEF_OUT
+dummy_res 5 220.0 220.0
+dummy_res 6 220.0 220.0
+END

+
    +
  1. +
      +
    1. By overriding the name "dummy_res" and also marking it unused, + you have now effectively removed dummy_res from the + calculations. The child table is automatically ignored as well + (so in a sense the entries there are unnecessary).  What is the + difference between commenting out "dummy_res" in the revision + layer and using a carat (^) in the revision layer?
      +Answer: Commenting out the reservoir in the revision layer + will be like the revision never existed and the information from + the original grid layer will be used in the simulation.
      + Using the carat (^) will "turn off" that reservoir for the + simulation. Neither the information in the original grid layer + nor in the revision layer will be used in that simulation. Thus + using the carat is a way to "turn on or off" alternative + components.
    2. +
    3. Save the current settings.
    4. +
    +
  2. +
  3. +

    Running HYDRO and QUAL disabling the new reservoir

    +
      +
    1. +

      In the ENVVAR section of hydro.inp and qual.inp, change + DSM2MODIFIER to layers_nodummyres and save the files.

      +
    2. +
    3. +

      In Windows Explorer, navigate to the directory: _
      + \{DSM2_home}\tutorial\simple

      +

      .

      +
        +
      1. +
          +
        1. Right-click on the directory, t3_layering, and + select Open Command Window Here. For Vista shift+right + click on directory to get the Open Command Window.
        2. +
        3. In the command window, type: hydro hydro.inp.
        4. +
        5. In the command window, type: qual qual.inp.
        6. +
        7. Compare the layer_base.out, layer_dummyres.out, and + layer_nodummyres.out echoed input files and + the output.dss file. Are the results the same for the + base simulation and the no dummy reservoir simulation?
        8. +
        +
      2. +
      3. +

        Changing the properties of a reservoir

        +
      4. +
      +

      This part of the tutorial demonstrates how a revision layer can +be used to change the properties of a simulation. In this case +the area of reservoir 1 is increased.

      +
        +
      1. +
          +
        1. Altering the Properties of the Original Reservoir res_1: + 1. In the Reservoirs table of + grid_tutorial_revision.inp, change the Area + (million sq ft) field of res_1 from 40 to 50. + 2. Copy the RESERVOIR_CONNECTION entries for res_1 from + grid_tutorial_base to grid_tutorial_revision. The + revision layer should look similar to the one below.
        2. +
        +
      2. +
      +

      RESERVOIR
      +NAME AREA BOT_ELEV 
      +res_1 50.0 -24.0 
      +^dummy_res 60.0 -30.0
      +END 
      +RESERVOIR_CONNECTION
      +RES_NAME NODE COEF_IN COEF_OUT
      +res_1 3 200.0 200.0 
      +res_1 4 200.0 200.0
      +dummy_res 5 220.0 220.0 
      +dummy_res 6 220.0 220.0 
      +END  

      +

      Why is it necessary to copy the reservoir connection +entries to the revision file?
      +Answer: When you override a layer (file) with another entry +in a parent table that has the same identifier, you COMPLETELY +replace that item in the new layer including child items. In +other words, if the child table (RESERVOIR_CONNECTION in this +case) is not included in the revision layer, the reservoir will +have no connections. The values in the original grid layer will +not be read.

      +
        +
      1. +
          +
        1. Save the current settings.
        2. +
        +
      2. +
      3. +

        Running HYDRO and QUAL with increased area for + reservoir 1

        +
          +
        1. In the ENVVAR section of hydro.inp and qual.inp, + change DSM2MODIFIER to layers_larger_res1 and save the + file.
        2. +
        3. In Windows Explorer, navigate to the directory: _
        4. +
        +
      4. +
      +

      .

      +
    4. +
    5. +

      Right-click on the directory, t3_layering, and select Open + Command Window Here. For Vista shift+right click on directory + to get the Open Command Window.

      +
    6. +
    7. +

      In the command window, type: hydro hydro.inp.

      +
    8. +
    9. +

      In the command window, type: qual qual.inp.

      +
    10. +
    11. +

      Compare the output to the earlier simulations.

      +
    12. +
    +
  4. +
  5. +

    Changing the name of Channel 2004:

    +
  6. +
+

In this step, we will replace the channel number of Channel 2004. In +this case, what we are changing is the identifier itself, rather than +the parameters and data. So what we will do is delete Channel 2004 and +put in a Channel 4 that is identical. In the process, we will ignore +this change in other parts of the input and see what happens to initial +conditions and output requests that reference a non-existent channel.

+
    +
  1. +
      +
    1. Keep the grid_tutorial_revision file open.
    2. +
    3. Copy the channel and xsect data from grid_tutorial_base.inp to + the beginning of grid_tutorial_revision.inp. Keep only channel + 2004.
    4. +
    5. In grid_tutorial_revision.inp in the CHANNEL and XSECT tables, + copy the data for Channel 2004 and paste another copy into those + tables.
    6. +
    7. In one of your two copies of channel 2004, change the channel + number in both tables to 4.
    8. +
    9. Eliminate channel 2004 by prepending a carat in the CHANNEL + table. Your revision should look like this:
    10. +
    +
  2. +
+

CHANNEL
+CHAN_NO LENGTH MANNING DISPERSION UPNODE DOWNNODE
+4 15000 0.035 0.3 4 5 
+^2004 15000 0.035 0.3 4 5 
+END 

+

XSECT_LAYER
+CHAN_NO DIST ELEV AREA WIDTH WET_PERIM
+4 0.5 -24.0 0.0 40.0 40.0 
+4 0.5 0.0 960.0 80.0 91.22 
+4 0.5 20.0 2640.0 160.0 133.6 
+2004 0.5 -24.0 0.0 40.0 40.0 
+2004 0.5 0.0 960.0 80.0 91.22 
+2004 0.5 20.0 2640.0 160.0 133.6 
+END 

+
    +
  1. +
      +
    1. Save your work. Note that the entries in XSECT_LAYER for channel + 2004 in the grid_tutorial_revision.inp are redundant since the + channel was disabled. However it is good practice to always + include full parent/child table groups in the revision layer so + that choices can be turned "on" or "off."
    2. +
    +
  2. +
  3. +

    Add Initial Conditions for the New Channel 4:

    +
  4. +
+

Since there is no default initial condition for channel 4, we will have +to add one. Similar to the other channels, we will use a zero flow +initial condition.

+
    +
  1. +
      +
    1. Create a file called channel_ic_revision.inp.
    2. +
    3. Copy the CHANNEL_IC table headers from hydro.inp to the new + file.
    4. +
    5. Create two rows of data for channel 4:
    6. +
    +
  2. +
+

CHANNEL_IC
+CHAN_NO DISTANCE STAGE FLOW
+4 0 0.0 0.0
+4 length 0.0 0.0
+END 

+
    +
  1. +
      +
    1. In the hydro.inp file create an INITIAL_CONDITION include + block underneath the GRID include block:
    2. +
    +
  2. +
+

INITIAL_CONDITION
+channel_ic_revision.inp
+END

+
    +
  1. +
      +
    1. Now every channel has an initial condition. Do you need to do + something about the "extra" initial condition for Channel 2004? + Try and see.
    2. +
    +
  2. +
  3. +

    Running HYDRO and QUAL

    +
      +
    1. +

      In the ENVVAR section of hydro.inp and qual.inp, change + DSM2MODIFIER to layers_ch2004_to_ch4 and save the files.

      +
    2. +
    3. +

      In Windows Explorer, navigate to the directory: _
      + \{DSM2_home}\tutorial\simple

      +

      .

      +
        +
      1. +
          +
        1. Right-click on the directory, t3_layering, and + select Open Command Window Here.
        2. +
        3. In the command window, type: hydro hydro.inp.
        4. +
        5. In the command window, type: qual qual.inp.
        6. +
        7. Open the output.dss file in + the t3_layering directory, and examine the results.
        8. +
        9. Open layers_ch2004_to_ch4_hydro_echo.inp. This is an + "echoed input" that replicates your input verbatim, + except ENVVAR replacements have been made and all the + channel xsects are in the one-file format. You should be + able to run the model using this file as easily as with + the original hydro.inp. Take a look and see:
            +
          1. Did channel 4 get in the input?
          2. +
          3. Did channel 2004? What does this mean?
          4. +
          +
        10. +
        11. Look at the output.dss file. Did the output for channel + 4 get included in the output file? If not, what would + you change to get output for channel 4?
        12. +
        +
      2. +
      +

       Only output specified in the input files is written to the +output.dss file. However, output for all locations is recorded +in the hdf5 *.h5 output file. 

      +
        +
      1. Converting hydro.inp to input blocks
      2. +
      +

      Now let's convert hydro.inp completely to include files except +for the SCALAR and IO_FILE sections. In future tutorials, hydro +and qual simulations will be organized this way. The file +hydro.inp or qual.inp is usually reserved for scalar or +input/output file designations.

      +
        +
      1. +
          +
        1. In the previous section of this tutorial, an + INITIAL_CONDITION include block was created underneath + the GRID include block. We will create an initial + condition input file for the original initial conditions + and include that file here. Add the file ic_tutorial.inp + as the first line of the INITIAL_CONDITION include + block. The channel_ic_revision.inp file was already + included in this block in the previous section of this + tutorial.
        2. +
        +
      2. +
      +

      INITIAL_CONDITION
      +ic_tutorial.inp
      +channel_ic_revision.inp
      +END

      +
        +
      1. +
          +
        1. Create a file called ic_tutorial.inp
        2. +
        3. Cut (not copy) the CHANNEL_IC and RESERVOIR_IC data + from hydro.inp and paste it into this file.
        4. +
        5. Create an include block called HYDRO_TIME_SERIES as + follows, in hydro.inp.
        6. +
        +
      2. +
      +

      HYDRO_TIME_SERIES
      +input_boundary_hydro_tutorial.inp
      +input_transfer_flow_tutorial.inp
      +END

      +
        +
      1. +
          +
        1. Create a file + called input_boundary_hydro_tutorial.inp. Cut (not + copy) the BOUNDARY_STAGE and BOUNDARY_FLOW input + from hydro.inp to input_boundary_hydro_tutorial.inp.
        2. +
        3. Similarly, create a file called + input_transfer_flow_tutorial.inp. Cut and paste the + INPUT_TRANSFER_FLOW data into this file.
        4. +
        5. Create an include block called OUTPUT_TIME_SERIES.
        6. +
        +
      2. +
      +

      OUTPUT_TIME_SERIES
      +output_hydro_tutorial.inp
      +END

      +
        +
      1. +
          +
        1. Similarly, create the file called + output_hydro_tutorial.inp. Cut and paste the + OUTPUT_CHANNEL data into this file.
        2. +
        3. The remaining tutorials will use include blocks + extensively for both hydro and qual.
        4. +
        5. Save all of the files.
        6. +
        +
      2. +
      3. +

        Running HYDRO and QUAL with all include files

        +
          +
        1. In the ENVVAR section of hydro.inp and qual.inp, + change DSM2MODIFIER to layers_include_block
        2. +
        3. In Windows Explorer, navigate to the directory: _
        4. +
        +
      4. +
      +

      .

      +
    4. +
    5. +

      Right-click on the directory, t3_layering, and select Open + Command Window Here.

      +
    6. +
    7. +

      In the command window, type: hydro hydro.inp.

      +
    8. +
    9. +

      In the command window, type: qual qual.inp.

      +
    10. +
    11. +

      Open the output.dss file in the t3_layering directory, and + examine the results, comparing it to the last run. Did putting + things in input blocks change anything?

      +
    12. +
    13. Learning more
    14. +
    +
  4. +
+

Overriding is easy to understand. The main things you will need to keep +in mind are

+
    +
  1. Understanding how child table replacement works:
      +
    1. You can't replace the child element without replacing the + parent.
    2. +
    3. The children of an overridden parent element are never used.
    4. +
    +
  2. +
  3. What is the unique identifier for each row in a table? In most cases + this is the first field and it is usually a name or a map number (it + is a label rather than a piece of hard data). In some cases (e.g. + output), the unique identifier may be two fields such as NAME and + VARIABLE for output. Overriding only occurs when the identifier for + the row is duplicated. This information is available in the table + reference documentation in the "documentation" folder.
  4. +
  5. Which data can be included in which blocks. For instance, GRID can + contain CHANNEL, GATE, RESERVOIR and TRANSFER data. This information + is given in Table 1 on the next page.
  6. +
+ +

+
    +
  1. Brain Teaser
      +
    1. For the same change in elevation between the reservoir and + connecting node, which reservoir would have a higher flow, res_1 + or dummy_res?
    2. +
    +
  2. +
+

Table 1: Include Blocks for DSM2 Input Files

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Include +Block

Sections

CONFIGURATION 

ENVVAR 
+SCALAR

GRID

CHANNEL 
+XSECT (child) 
+XSECT_LAYER (child) 
+RESERVOIR 
+RESERVOIR_CONNECTION (child) 
+GATE 
+GATE_WEIR_DEVICE (child) 
+GATE_PIPE_DEVICE (child) 
+TRANSFER

GROUPS

GROUP 
+GROUP_MEMBER (child)

HYDRO_TIME_SERIES

INPUT_TRANSFER_FLOW 
+INPUT_GATE 
+BOUNDARY_STAGE 
+BOUNDARY_FLOW 
+SOURCE_FLOW 
+SOURCE_FLOW_RESERVOIR

INITIAL_CONDITION

CHANNEL_IC 
+RESERVOIR_IC 

OPERATION

OPERATING_RULE 
+
+OPRULE_EXPRESSION 
+OPRULE_TIME_SERIES

OUTPUT_TIME_SERIES

OUTPUT_CHANNEL 
+OUTPUT_RESERVOIR 
+OUTPUT_CHANNEL_SOURCE_TRACK 
+OUTPUT_RESERVOIR_SOURCE_TRACK 
+OUTPUT_GATE

PARTICLE

PARTICLE_INSERTION 
+PARTICLE_FLUX_OUTPUT 
+PARTICLE_GROUP_OUTPUT

QUAL_SPATIAL

RATE_COEFFICIENT

QUAL_TIME_SERIES

INPUT_CLIMATE 
+NODE_CONCENTRATION 
+RESERVOIR_CONCENTRATION

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/Tutorial_4_Time_Varying_Data/index.html b/tutorials/Tutorial_4_Time_Varying_Data/index.html new file mode 100644 index 00000000..013713de --- /dev/null +++ b/tutorials/Tutorial_4_Time_Varying_Data/index.html @@ -0,0 +1,742 @@ + + + + + + + + + + + + + + + + + + Tutorial 4: Time Varying Data - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Tutorial 4: Time Varying Data

+

Task
+Convert the boundary conditions and gate operations from constants to +time varying input data. 
+Skills Gained

+
    +
  • Learn about HEC-DSS as a time series data storage system
  • +
  • +

    Learn how HEC-DSS path names are used to reference time series in + DSM2 input files 
    + The purpose of this tutorial is to incorporate time-varying + information into the model. In the previous sections, all boundary + conditions and gate timings were set as constant, and no input files + were needed. In this section, the model is set to read time-varying + information stored in HEC-DSS files. 
    +  The U.S. Army Corps of Engineers' Hydrologic Engineering Center + Data Storage System, or HEC-DSS, is a database system designed to + efficiently store and retrieve scientific data that is typically + sequential. Such data types include, but are not limited to, time + series data, curve data, spatial-oriented gridded data, and others. + The system was designed to make it easy for users and application + programs to retrieve and store data. 
    + Data in HEC-DSS format can be viewed using special software + including VISTA (DWR), or HEC-DSSVue. Each time series is described + in the database using DSS Pathnames (see column headings in figure). + For DSM2 the pathnames are typically used as follows:
    +Calsim DSS Paths

    +

    A-Part: Data Source
    +B-Part: Location
    +C-Part: Variable
    +D-Part: Date range
    +E-Part: Data frequency 
    +F-Part: Description (in the sample shown the F-Part is the CalSim +run identifier).
    +For more information see the HEC-DSS +website.
    +dummy_res
    +reservoir connections with +Figure 1: Simple channel with a reservoir, gate, flow transfer +and dummy reservoir. 

    +
  • +
  • +

    Change the Transfer Flows to HEC-DSS input:

    +
  • +
+

The constant transfer flow from the previous tutorials will be changed +to a time series.

+
    +
  1. Create a new file in Notepad++ or another text editor called + input_hydro_ts_tutorial.inp
  2. +
  3. In the new file, create the TRANSFER_TIME_SERIES table:
  4. +
+

INPUT_TRANSFER_FLOW
+TRANSFER_NAME FILLIN FILE PATH
+END

+
    +
  1. Enter the following values into the appropriate fields: + 1. Input Name: transfer_1 + 2. Fillin: linear + 3. Input File: ${TUTORIALINPUT} + 4. Path/Value: /TUTORIAL/TRANSFER/FLOW//15MIN/CONSTANT/
  2. +
+

 The HEC-DSS pathnames are referred to using forward slashes
+/A-Part/B-Part/C-Part/D-Part/E-Part/F-Part/
+In the example above, the A-Part is TUTORIAL, the B-Part is TRANSFER, +etc. and the D-Part isn't specified. 

+
    +
  1. Open hydro.inp. The input file uses an ENVVAR reference as the + filename, so add the definition of TUTORIALINPUT. At the same + time, set DSM2MODIFIER to timevar_1:
  2. +
+

ENVVAR
+NAME VALUE 
+HYDROOUTDSSFILE output.dss 
+DSM2MODIFIER timevar_1 
+TUTORIALINPUT ../timeseries/tutorial.dss 
+END 

+
    +
  1. We are going to replace the existing time series with the new + file, so make sure it is listed below the other files as + follows.
  2. +
+

HYDRO_TIME_SERIES
+input_boundary_hydro_tutorial.inp
+input_transfer_flow_tutorial.inp
+input_hydro_ts_tutorial.inp
+END

+
    +
  1. Save the files.
  2. +
  3. +

    Open qual.inp and set DSM2MODIFIER to timevar_1 as well + (hydro.inp and qual.inp must agree or the tidefile won't be + found).

    +
  4. +
  5. +

    Running HYDRO and QUAL

    +
      +
    1. +

      In Windows Explorer, navigate to the directory: _
      + \{DSM2_home}\tutorial\simple

      +

      .

      +
        +
      1. Right-click on the directory, t4_timevar, and + select Open Command Window Here.
      2. +
      3. In the command window, type: hydro hydro.inp. Examine + timevar_1_hydro_echo.inp. Did the time series assignment + get used?
      4. +
      5. In the command window, type: qual qual.inp.
      6. +
      7. Open the output.dss file in + the t4_timevar directory, and verify that the results + are identical to the results from the previous tutorial + (located in the t3_layering directory). Why is this?
      8. +
      +

      Adjust DSM2MODIFIER to represent a variant scenario:

      +
        +
      1. In Windows Explorer, navigate to the + directory: \{DSM2_home}\tutorial\simple\t4_timevar
      2. +
      3. Open hydro.inp for editing.
      4. +
      5. In the ENVVAR section, change + the DSM2MODIFIER environment variable + from timevar_1 to timevar_2.
      6. +
      7. Open qual.inp for editing.
      8. +
      9. +

        In the ENVVAR section, change + the DSM2MODIFIER environment variable + from timevar_1 to timevar_2.

        +
      10. +
      11. +

        Add Source information into HYDRO:

        +
          +
        1. In input_hydro_ts_tutorial.inp, create the table for + node sources:
        2. +
        +
      12. +
      +

      SOURCE_FLOW
      +NAME NODE SIGN FILLIN FILE PATH
      +END

      +
        +
      1. Enter the following values into the appropriate fields: + 1. Name: source1 + 2. Node: 5 + 3. Input File: ${TUTORIALINPUT} + 4. Path/Value: /TUTORIAL/SOURCE/FLOW//15MIN/CONSTANT/ + 5. Sign: 1 + 6. Fillin: linear
      2. +
      3. +

        Save the current settings.

        +
      4. +
      5. +

        Add Corresponding Source information into QUAL: + Create a file called input_qual_ts_tutorial.inp.

        +
          +
        1. In input_qual_ts_tutorial.inp, create the + NODE_CONCENTRATION table
        2. +
        +
      6. +
      +

      NODE_CONCENTRATION
      +NAME NODE_NO VARIABLE FILLIN FILE PATH
      +END 

      +
        +
      1. Enter the following values into the appropriate + fields: + 1. Input Name: source1 + 2. Node: 5 + 3. Variable: ec + 4. Input File: ${TUTORIALINPUT} + 5. Path/Value: /TUTORIAL/SOURCE/EC//15MIN/CONSTANT/ + 6. Fillin: last
      2. +
      3. Add the ENVVAR definition for TUTORIALINPUT in + qual.inp
      4. +
      +

      TUTORIALINPUT ../timeseries/tutorial.dss

      +
        +
      1. In qual.inp, make sure that the file gets used:
      2. +
      +

      QUAL_TIME_SERIES
      +input_node_conc_tutorial.inp
      +input_qual_ts_tutorial.inp
      +END 

      +
        +
      1. Add Time-varying Tide Information for Downstream Boundary + in HYDRO:
          +
        1. Reopen input_hydro_ts_tutorial.inp
        2. +
        3. Create the BOUNDARY_STAGE table.
        4. +
        +
      2. +
      +

      BOUNDARY_STAGE
      +NAME NODE FILLIN FILE PATH
      +END

      +
        +
      1. +

        In the Boundary Stage table enter the following + values into the appropriate fields: + 1. Input Name: downstream_stage + 2. Node: 7 + 3. Input File: ${TUTORIALINPUT} + 4. Path/Value: /TUTORIAL/DOWNSTREAM/STAGE//15MIN/REALISTIC/ + 5. Fillin: linear

        +
      2. +
      3. +

        Add Downstream Boundary in QUAL:

        +
          +
        1. Re-open input_qual_ts_tutorial.inp.
        2. +
        3. In the Node Concentration table:
            +
          1. Enter the following values into the appropriate + fields:
              +
            1. Input Name: downstream_stage
            2. +
            3. Node: 7
            4. +
            5. Variable: ec
            6. +
            7. Input File: ${TUTORIALINPUT}
            8. +
            9. Path/Value: /TUTORIAL/DOWNSTREAM/EC//15MIN/REALISTIC/
            10. +
            11. Fillin: last
            12. +
            +
          2. +
          +
        4. +
        +
      4. +
      5. +

        Add a Gate Time Series to HYDRO:

        +
      6. +
      +

      This gate time series will control the weir. The pipe is to be +left open all the time (its default).

      +
        +
      1. Create a file for the gate input + called input_gate_tutorial.inp
      2. +
      3. Create the gate time series table INPUT_GATE:
      4. +
      5. In the table enter the following values into the + appropriate fields:
          +
        1. Gate: gate_1
        2. +
        3. Device: weir
        4. +
        5. Variable: op_from_node
        6. +
        7. Input File: ${TUTORIALINPUT}
        8. +
        9. Path/Value: /TUTORIAL/GATE/FLAP_OP//IR-YEAR/TIMEVAR/
        10. +
        11. Fillin: none (Can you tell why fillin is + "none" for this time series?)
        12. +
        +
      6. +
      7. Add the include file to hydro.inp. The time series block + should look as follows:
      8. +
      +

      HYDRO_TIME_SERIES
      +input_boundary_hydro_tutorial.inp
      +input_transfer_flow_tutorial.inp
      +input_hydro_ts_tutorial.inp
      +input_gate_tutorial.inp
      +END

      +
        +
      1. Save the current settings.
      2. +
      +

      Running HYDRO and QUAL

      +
        +
      1. In Windows Explorer, navigate to the directory: _
      2. +
      +

      .

      +
    2. +
    3. +

      Right-click on the directory, t4_timevar, and select Open + Command Window Here.

      +
    4. +
    5. +

      In the command window, type: hydro hydro.inp.

      +
    6. +
    7. +

      In the command window, type: qual qual.inp.

      +
    8. +
    9. +

      Open the output.dss file in the t4_timevar directory, and + examine the results.

      +
    10. +
    +
  6. +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/Tutorial_5_Advanced_Output_and_Source_Tracking/index.html b/tutorials/Tutorial_5_Advanced_Output_and_Source_Tracking/index.html new file mode 100644 index 00000000..6d5e49f2 --- /dev/null +++ b/tutorials/Tutorial_5_Advanced_Output_and_Source_Tracking/index.html @@ -0,0 +1,696 @@ + + + + + + + + + + + + + + + + + + Tutorial 5: Advanced Output and Source Tracking - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Tutorial 5: Advanced Output and Source Tracking

+

Task

+
    +
  • Create boundary and source groups
  • +
  • Request output for constituent source tracking
  • +
+

Skills Gained
+Learn how to use advanced output options in DSM2 including source +tracking

+

The purpose of this tutorial is to provide instruction on advanced +output options in DSM2. Basic outputs include flow, stage and +constituent concentrations at nodes and channel locations. Advanced +outputs include creating output groups and source tracking.
+The first part of this tutorial involves modifications to the text input +file, hydro.inp. We will add some outputs and also take a look at how +data in hydro.inp is prioritized. The second part introduces the use +of groups for source tracking. This tutorial uses the simple channel +network shown in Figure 1.
+figure 1 +Figure 1: Simple channel with a reservoir, gate, flow transfer and +dummy reservoir.

+
    +
  1. Add Output Paths to hydro.inp:
  2. +
+

In this step of the tutorial, we will request output upstream and +downstream of the gate and reservoir 1.

+
    +
  1. +
      +
    1. In Windows Explorer, navigate to the directory, + \{DSM2_home}\tutorial\simple\t5_output.
    2. +
    3. Open the file addin.inp and note the new output paths for the + channels and reservoir.
    4. +
    5. Copy the entire file contents to the clipboard.
    6. +
    7. Open the file hydro.inp.
    8. +
    9. Navigate to the bottom of the file and paste the information. + Note that there are now two output requests for a location named + bnd_1. In hydro.inp bnd_1 is defined as channel1 location 0 + and in output_hydro_tutorial.inp it has been defined as + channel 1 location 100.
    10. +
    +
  2. +
+

For +flow data at bnd_1, will the output be written at the upstream end of +the channel (location 0) or 100ft downstream?
+Answer: The output will be for 100ft downstream because the output +request in the launch file (e.g. hydro.inp or qual.inp) supersedes +all other output requests that have the same identifier. In this case +the identifier is the NAME and VARIABLE combination (e.g. bnd_1 and +flow).
+ + How +would you get output at channel 1 and both location 0 and location +100?
+Answer: Give each location a unique identifier, e.g. bnd_1 and +bnd_100.

+
    +
  1. Add Boundary and Source Groups:
  2. +
+

GROUPS are user-defined groups of model objects, for instance groups of +water bodies or groups of boundary inputs. Groups are used a number of +places in DSM2, including: tracking of constituents originated from +grouped sources, tracking of particles as they reside or move between +groups of water bodies and/or boundaries, and assignment of rate +coefficients in QUAL to groups of water bodies. In the output +specifications, groups are used to define aggregate sources for source +tracking. For example, output groups could be used to track mass +originating from all the boundaries, or from all Delta Island +Consumptive Use (DICU) diversions, etc. In this section, we will create +two output groups: boundary locations and water quality constituent +source locations.

+
    +
  1. In the study directory, create a file called + group_tutorial.inp.
  2. +
  3. In the group_tutorial.inp file, add a group table. Note that + this is a parent table for overwriting/layering purposes. Define + a boundary and a sources group:
  4. +
+

GROUP
+NAME
+boundary
+sources
+END

+
    +
  1. Now define the group members. Create the GROUP_MEMBER table + below the GROUP table:
  2. +
+

GROUP_MEMBER
+GROUP_NAME MEMBER_TYPE PATTERN
+END

+
    +
  1. In the Group Members table:
    + 1. Enter a row with the following values in the appropriate + fields:
    + 1. GROUP_NAME: boundary
    + 2. MEMBER_TYPE: stage
    + 3. PATTERN: .*stream.*
    + 4. Note that the dot-star .* in the above pattern is a + "regular expression" wildcard. You can use any standard + Perl-style regular expression in groups, but the html + documentation for GROUPS describes most of the patterns + you can put in a GROUP_MEMBER that are really useful.
  2. +
+

+Look in the input_boundary_hydro_tutorial.inp file and determine what +boundary conditions are part of the boundary group based on the member +type "stage" and the pattern ".*stream.*".

+
    +
  1. Enter another row with the following values in the + appropriate fields:
    + 1. GROUP_NAME: boundary
    + 2. MEMBER_TYPE: flow_boundary
    + 3. PATTERN: .*stream.*
  2. +
+

+Look in the input_boundary_hydro_tutorial.inp file and determine what +boundary conditions are part of the boundary group based on the member +type "flow_boundary" and the pattern ".*stream.*".

+
    +
  1. In the Group Members table insert another row with the + following values in the appropriate fields:
    + 1. GROUP_NAME: sources
    + 2. MEMBER_TYPE: source_sink
    + 3. PATTERN: source1
  2. +
+

+Look in the various qual input files and determine which inputs will +make up the sources group defined above.

+
    +
  1. In the qual.inp file, create the GROUPS (note the plural) + include block that will reference this file:
  2. +
+

GROUPS
+group_tutorial.inp
+END

+
    +
  1. +

    Save the current settings.

    +
  2. +
  3. +

    Source Tracking:

    +
  4. +
+

Source tracking (aka fingerprinting) determines the amount of water or +of a constituent at one location that originated from a specified +location. For constituent fingerprinting, 1) define a source group (e.g. +boundaries or DICU locations), and then 2) request output for that +group. For volumetric fingerprinting that indicates the percentage of +flow that originated from each boundary location, 1) create a +fingerprinting constituent and set its value equal to 100 at all +boundaries, 2) define a source group for all boundaries, and 3) request +output from that source group.

+
    +
  1. Add Source Tracking Output for Channel 5:
  2. +
+

To demonstrate source tracking, this part of the tutorial examines how +much of the EC in channel 5 (see Figure 1) came from the boundaries and +from other sources. For comparison purposes, the EC from all sources +will also be output.
+Create a new file called output_qual_sourcetrack.inp.

+
    +
  1. +
      +
    1. In this file, create an OUTPUT_CHANNEL_SOURCE_TRACK table. Refer + to the documentation to create the header.
    2. +
    3. In the Channel Output table create 3 rows:
        +
      1. For the first new row, enter the following values into the + appropriate fields:
          +
        1. Name: ch5
        2. +
        3. Channel: 5
        4. +
        5. Distance: 5000
        6. +
        7. Variable: ec
        8. +
        9. Source Group: all (this will track ec from all + sources)
        10. +
        11. Output File: ${QUALOUTDSSFILE}
        12. +
        13. Time Interval: 15min
        14. +
        15. Period Op: inst
        16. +
        +
      2. +
      3. For the second new row, enter the following values into the + appropriate fields:
          +
        1. Name: ch5
        2. +
        3. Channel: 5
        4. +
        5. Distance: 5000
        6. +
        7. Variable: ec
        8. +
        9. Source Group: boundary
        10. +
        11. Output File: ${QUALOUTDSSFILE}
        12. +
        13. Time Interval: 15min
        14. +
        15. Period Op: inst
        16. +
        +
      4. +
      5. For the third new row, enter the following values into the + appropriate fields:
          +
        1. Name: ch5
        2. +
        3. Channel: 5
        4. +
        5. Distance: 5000
        6. +
        7. Variable: ec
        8. +
        9. Source Group: sources
        10. +
        11. Output File: ${QUALOUTDSSFILE}
        12. +
        13. Time Interval: 15min
        14. +
        15. Period Op: inst
        16. +
        +
      6. +
      +
    4. +
    5. Save the current settings.
    6. +
    +
  2. +
  3. +

    Running HYDRO and QUAL

    +
      +
    1. Open a command window for the t5_output directory.
    2. +
    3. In the command window, type: hydro hydro.inp.
    4. +
    5. In the command window, type: qual qual.inp.
    6. +
    7. Open the hydro echo file output_tutorial_hydro_echo.inp. Which + version of bnd_1 got picked up by the model, the one in + hydro.inp or the one in output_hydro_tutorial.inp?
    8. +
    9. Open the output.dss file in the t5_output directory, and + examine the results. Do a mass balance to make sure the source + tracking adds up.
    10. +
    +
  4. +
+

+
    +
  1. Brain Teaser
  2. +
+

How would you set up a source tracking simulation to determine what +percentage of water/flow at a given location originated from a specified +boundary?

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/Tutorial_6_Operating_Rules/index.html b/tutorials/Tutorial_6_Operating_Rules/index.html new file mode 100644 index 00000000..653533af --- /dev/null +++ b/tutorials/Tutorial_6_Operating_Rules/index.html @@ -0,0 +1,862 @@ + + + + + + + + + + + + + + + + + + Tutorial 6: Operating Rules - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Tutorial 6: Operating Rules

+

Task

+
    +
  • Operate a gate based on stage criteria
  • +
  • Regulate a source/sink inflow
  • +
+

Skills Gained Get an introduction to operating rules

+

The purpose of this tutorial is to practice using Operating Rule +Language (ORL) statements to set gate operations and flows. With +operating rules, expressions can be crafted to steer the model +on-the-fly; e.g., a gate can be directed to automatically close when +stage conditions reach a certain threshold. In this tutorial we will +create operating rules to operate a gate and to regulate a source/sink +inflow.
+ +Extensive documentation on the DSM2 operating rules can be found at:
+START menu → Programs → DSM2_v8 → DSM2_documentation → Operating +Rules

+

+Figure 1: Simple channel with a reservoir, gate, flow transfer and +dummy reservoir.

+
    +
  1. Adding a Second Gate Where Op Rule Will Be Applied
  2. +
+

In this step of the tutorial we will prepare a new layer and add the +gate that will be manipulated by the op rule.

+
    +
  1. +
      +
    1. In Windows Explorer, navigate to the directory, + \{DSM2_home}\tutorial\simple\t6_oprule.
    2. +
    3. Create a file grid_tutorial_opitems.inp.
    4. +
    5. +

      Open grid_tutorial_base.inp. We are going to copy items from + this file into the new file with minor changes:

      +
        +
      1. +

        Copy the GATE table with gate_1, paste it into + grid_tutorial_opitems.inp and change the following fields:

        +
          +
        1. +

          NAME: gate_2

          +
        2. +
        3. +

          FROM_OBJ: channel

          +
        4. +
        5. +

          FROM_IDENTIFIERS: 5

          +
        6. +
        7. +

          TO_NODE: 5

          +
            +
          1. In the Gate_Weir Devices table:
              +
            1. Copy the data from gate_1 to grid_tutorial_opitems.inp, +change the gate name to gate_2 and change the following +fields:
            2. +
            +
          2. +
          +
        8. +
        +
      2. +
      3. +
          +
        1. GATE_NAME: gate_2
        2. +
        3. Elev: -2
        4. +
        5. Save the current settings.
        6. +
        7. Add grid_tutorial_opitems.inp to the list of included files in +hydro.inp.
        8. +
        +
      4. +
      +
    6. +
    +
  2. +
  3. +

    Adding Output for the Second Gate:

    +
      +
    1. Create a file called output_oprule_tutorial.inp.
    2. +
    3. Create the OUTPUT_GATE table:
    4. +
    +
  4. +
+

OUTPUT_GATE
+NAME GATE_NAME DEVICE VARIABLE INTERVAL PERIOD_OP FILE
+END

+
    +
  1. +
      +
    1. In the output table enter the following values into the + appropriate fields: + 1. 1. Output Name: gate_2_weirop + 2. Gate name: gate_2 + 3. Device: weir + 4. Variable: op-from-node + 5. Time Interval: 15min + 6. Period Op: inst + 7. File: ${HYDROOUTDSSFILE}
    2. +
    3. Add the following channel outputs in a new OUTPUT_CHANNEL table:
    4. +
    +
  2. +
+

OUTPUT_CHANNEL
+NAME CHAN_NO DISTANCE VARIABLE INTERVAL PERIOD_OP FILE
+trigger_loc 4 7500 stage 15min inst ${HYDROOUTDSSFILE}
+ds_gate2 5 0 flow 15min inst ${HYDROOUTDSSFILE}
+END

+
    +
  1. +
      +
    1. Add the output layer to the list of include files in hydro.inp + and save your work.
    2. +
    +
  2. +
  3. +

    Create an Operating Rule to Close the Weir when Stage is Low:

    +
  4. +
+

Now we are ready to write the first operating rule. This rule closes the +new gate we created during times where stage at a monitoring point is +low. First we will define the rule in terms of an expression called +stage_critical (the condition where stage violates a minimum) and +op_applies (a seasonal condition that is True when we are controlling +the gate for stage). In a later step we will define these variables.

+
    +
  1. +
      +
    1. Create a file called oprule_tutorial.inp.
    2. +
    3. Create the Operating Rules table:
    4. +
    +
  2. +
+

OPERATING_RULE
+NAME ACTION TRIGGER
+END

+
    +
  1. Enter the following values into the appropriate fields: + 1. Name: weir_close + 2. Action Definition: "SET gate_op(gate=gate_2, + device=weir, direction=from_node) TO CLOSE RAMP 30MIN"
  2. +
+

You +must use quotes for inputs with spaces.

+
    +
  1. +

    Trigger Definition: "stage_critical AND op_applies"

    +
      +
    1. Create an OPERATION include block in hydro.inp and add the new + file so that it will be used by DSM2-HYDRO.
    2. +
    +
  2. +
+

OPERATION
+oprule_tutorial.inp
+END

+
    +
  1. Save the current settings.
  2. +
+

Note that the expressions stage_critical and op_applies will be created +in a later step.

+
    +
  1. Create an Operating Rule to Open the Weir when Stage is High:
  2. +
+

As before, we will enter the rule to open the weir first in terms of the +expressions stage_relax (a condition where stage is safely above a +threshold where we can open the gate) and op_applies. In the next step +we will define these expressions.

+
    +
  1. +
      +
    1. In the Operating Rules table enter the following values into + the appropriate fields: + 1. Name: weir_open + 2. Action Definition: "SET gate_op(gate=gate_2, + device=weir, direction=from_node) TO OPEN RAMP 30MIN" + 3. Trigger Definition: "( stage_relax AND op_applies) OR + NOT(op_applies)"
    2. +
    3. Save the current settings.
    4. +
    5. In the hydro.inp file, add the following environmental + variables and values into the ENVVAR section:
    6. +
    +
  2. +
+

STAGE_CRITICAL 1.4
+STAGE_RELAX 1.6

+
    +
  1. Define Expressions used in the rule
      +
    1. In the file oprule_tutorial.inp, create the OPRULE_EXPRESSION + table:
    2. +
    +
  2. +
+

OPRULE_EXPRESSION
+NAME DEFINITION
+END

+
    +
  1. +
      +
    1. +
        +
      1. Enter the following values into the appropriate fields: + 1. Name: op_applies + 2. Definition: "SEASON \< 01FEB" + 2. Enter the following values into the appropriate fields. + Don't forget quotes!! + 1. Name: stage_critical + 2. Definition: "chan_stage(channel=4, dist=7500) \< + ${STAGE_CRITICAL}" + 3. Enter the following values into the appropriate fields: + 1. Name: stage_relax + 2. Definition: "chan_stage(channel=4, dist=7500) > + ${STAGE_RELAX}"
      2. +
      +
    2. +
    3. +

      Save the current settings.

      +
    4. +
    5. +

      Now run HYDRO and QUAL:

      +
        +
      1. Open a command window for the t6_oprule directory.
      2. +
      3. In the command window, type: hydro hydro.inp.
      4. +
      5. In the command window, type: qual qual.inp.
      6. +
      7. Open the output.dss file in the t6_oprule directory, and + examine the results.
      8. +
      +
    6. +
    +
  2. +
  3. +

    Add a Reduced Flow Operating Rule:

    +
  4. +
+

In our next operating rule, we will control the inflow to a node by +having it toggle back and forth between a larger "full flow" and a +"reduced flow". First we will enter the rule and then we will define the +full and reduced flows.

+
    +
  1. +
      +
    1. In the Operating Rules table enter the following values into + the appropriate fields: + 1. 1. Name: flow_reduce + 2. Action Definition: SET ext_flow(name=source1) TO + ifelse(stage_critical,reduced_flow,full_flow) + 3. Trigger Definition: TRUE
    2. +
    3. +

      Now create the expressions that define full_flow and + reduced_flow. In the Oprule Expressions table:

      +
        +
      1. +

        Enter the following values into the appropriate fields that + define full_flow. This will involve the time series + source_flow which we will enter later:

        +
          +
        1. +

          Input Name: full_flow

          +
        2. +
        3. +

          Definition: ts(name=source_flow) [note: this is a +reference to a time series we haven't defined yet].

          +
        4. +
        +
      2. +
      3. +

        Do the same for reduced_flow. Note: we are defining + reduced_flow in terms of the time series. There is no + guarantee of what order expressions will be evaluated, so + you cannot safely define reduced_flow in terms of another + expression such as full_flow. Enter the following values + into the appropriate fields:

        +
          +
        1. Input Name: reduced_flow
        2. +
        3. Definition: 0.5*ts(name=source_flow).
        4. +
        5. Save the current settings.
        6. +
        +
      4. +
      5. Now we will define the source_flow time series upon which + the full_flow and reduced_flow expressions are based.
          +
        1. Create the Operation Time Series table:
        2. +
        +
      6. +
      +
    4. +
    +
  2. +
+

OPRULE_TIME_SERIES
+NAME FILLIN FILE PATH

+
    +
  1. +
      +
    1. +
        +
      1. +

        Enter the following values into the appropriate fields: + 1. Input Name: source_flow

        +
          +
        1. +

          Input File: ${TUTORIALINPUT}

          +
        2. +
        3. +

          Path: /TUTORIAL/SOURCE/FLOW//15MIN/CONSTANT/ [ Note: +there are two forward slashes between FLOW and +15MIN]

          +
        4. +
        5. +

          Fillin: none

          +
        6. +
        +
      2. +
      +
    2. +
    3. +

      Save the current settings.

      +
    4. +
    +
  2. +
+ + +
    +
  1. Override the Expression op_applies:
  2. +
+

Recall that op_applies is used to determine when the weir is operated. +Previously the definition of this expression was seasonal: the +expression was SEASON \< 01FEB. The goal now is to make the same +expression depend on a time series. Rather than change the expression, +we will override it in a new layer.

+
    +
  1. +
      +
    1. Add a new Operating Rules Layer: + 1. Create a file called oprule_tutorial_revision.inp
    2. +
    3. +

      Redefine the expressions that define op_applies. In the + Expressions table:

      +
        +
      1. Create the OPRULE_EXPRESSION table.
      2. +
      3. +

        Enter the following values into the appropriate fields:

        +
          +
        1. +

          Input Name: op_applies

          +
        2. +
        3. +

          Definition: "ts(name=op_used)>0.0" [note: this is a +reference to a time series we will define in the next +step].

          +
        4. +
        +
      4. +
      +
    4. +
    5. +

      Define the time series op_used on which the op_applies + expression depends. In the Operation Time Series table:

      +
        +
      1. Right-click and select Insert row.
      2. +
      3. Enter the following values into the appropriate fields:
          +
        1. Input Name: op_used
        2. +
        3. Input File: ${TUTORIALINPUT}
        4. +
        5. Path: /TUTORIAL/GATE/FLAP_OP//IR-YEAR/TIMEVAR/
        6. +
        7. Fillin: none
        8. +
        +
      4. +
      +
    6. +
    7. Add oprule_tutorial_revision.inp after oprule_tutorial.inp + in the OPERATION block of hydro.inp so that it will be used by + HYDRO.
    8. +
    9. Run HYDRO and QUAL and examine the results.
    10. +
    +
  2. +
+

Attachments:

+

+worddavd56eb5f63f4c9181cb2a8632c8c6c562.png +(image/png)
+ +worddave8e1df4e853bb46c4ee6f68afece040d.png +(image/png)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/tutorials/Tutorials/index.html b/tutorials/Tutorials/index.html new file mode 100644 index 00000000..9ef6119c --- /dev/null +++ b/tutorials/Tutorials/index.html @@ -0,0 +1,605 @@ + + + + + + + + + + + + + + + + + + + + + + Tutorials (archive) - DSM2 version 8.2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + +

Tutorials

+ +

The tutorials in PDF form are here 

+

Don't edit these. They are only here for reference. This should be +removed once the tutorials are updated and correctly formatted.

+

Download +All

+

The presentations from the class are +here

+

Attachments:

+

DSM2 +Overview.pdf (application/pdf)
+ +DeltaTutorial7-Diurnal Pumping +CCFB.pdf (application/pdf)
+ +DeltaTutorial6-SDIP Op Rules.pdf +(application/pdf)
+ +DeltaTutorial5-Marsh Op Rules.pdf +(application/pdf)
+ +DeltaTutorial4-Batch.pdf +(application/pdf)
+ +DeltaTutorial3-Planning.pdf +(application/pdf)
+ +DeltaTutorial2-Source Tracking.pdf +(application/pdf)
+ +DeltaTutorial1-Historical.pdf +(application/pdf)
+ +BasicTutorial6-Oprule.pdf +(application/pdf)
+ +BasicTutorial5-Output.pdf +(application/pdf)
+ +BasicTutorial4-Timevar.pdf +(application/pdf)
+ +BasicTutorial3-Layering.pdf +(application/pdf)
+ +BasicTutorial2-Reservoir_Gate_Transfer.pdf +(application/pdf)
+ +BasicTutorial1-Channels.pdf +(application/pdf)
+ An +Introduction to DSM2 Tutorials.pdf +(application/pdf)

+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file